Posted to commits@ambari.apache.org by jo...@apache.org on 2014/08/27 16:04:42 UTC

[01/35] git commit: AMBARI-6980. Refactor of 2.1.GlusterFS stack to move global to env (Scott Creeley via subin)

Repository: ambari
Updated Branches:
  refs/heads/branch-alerts-dev 4a4644b88 -> ae8f1e77e


AMBARI-6980. Refactor of 2.1.GlusterFS stack to move global to env (Scott Creeley via subin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fefc129b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fefc129b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fefc129b

Branch: refs/heads/branch-alerts-dev
Commit: fefc129b1a50cdc0c8a07e37893a9b3f6ddda5b9
Parents: 3c13f40
Author: root <ro...@c6501.ambari.apache.org>
Authored: Mon Aug 25 17:49:16 2014 +0000
Committer: root <ro...@c6501.ambari.apache.org>
Committed: Mon Aug 25 17:49:16 2014 +0000

----------------------------------------------------------------------
 .../FALCON/configuration/falcon-env.xml         |  63 ++++++
 .../2.1.GlusterFS/services/FALCON/metainfo.xml  |   2 +-
 .../services/FALCON/package/scripts/params.py   |  32 +--
 .../FALCON/package/scripts/status_params.py     |   2 +-
 .../GLUSTERFS/configuration/core-site.xml       |  26 ---
 .../GLUSTERFS/configuration/hadoop-env.xml      | 207 +++++++++++++++++++
 .../services/GLUSTERFS/metainfo.xml             |   7 +-
 .../2.1.GlusterFS/services/OOZIE/metainfo.xml   |   2 +-
 .../services/STORM/configuration/storm-env.xml  |  39 ++++
 .../2.1.GlusterFS/services/STORM/metainfo.xml   |   2 +-
 .../services/STORM/package/scripts/params.py    |  12 +-
 .../STORM/package/scripts/status_params.py      |   2 +-
 .../services/TEZ/configuration/tez-env.xml      |  29 +++
 .../HDP/2.1.GlusterFS/services/TEZ/metainfo.xml |   2 +-
 .../services/TEZ/package/scripts/params.py      |   4 +-
 .../services/YARN/configuration/global.xml      |  64 ------
 .../services/YARN/configuration/yarn-env.xml    | 181 ++++++++++++++++
 .../2.1.GlusterFS/services/YARN/metainfo.xml    |   3 +-
 .../services/YARN/package/scripts/params.py     |  41 ++--
 .../YARN/package/scripts/status_params.py       |   8 +-
 ambari-web/app/data/HDP2/site_properties.js     | 178 ++++++++++------
 ambari-web/app/data/site_properties.js          | 160 ++++++++------
 ambari-web/app/models/stack_service.js          |   2 +-
 23 files changed, 791 insertions(+), 277 deletions(-)
----------------------------------------------------------------------
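
The pattern applied throughout this commit is the same in every service script: properties the stack previously read from the single 'global' config type are now read from per-service *-env config types (falcon-env, storm-env, tez-env, yarn-env, mapred-env), or from hadoop-env for shared values such as user_group and hdfs_user. A minimal sketch of that lookup change follows; the nested dict is a hypothetical stand-in for the structure returned by resource_management's Script.get_config(), not the real command JSON.

# Minimal sketch of the 'global' -> '*-env' lookup change (assumed sample values).
config = {
    'configurations': {
        'falcon-env': {'falcon_user': 'falcon', 'falcon_port': '15000'},
        'hadoop-env': {'user_group': 'hadoop', 'hdfs_user': 'hdfs'},
    }
}

# Before this commit:
#   falcon_user = config['configurations']['global']['falcon_user']
# After this commit:
falcon_user = config['configurations']['falcon-env']['falcon_user']
user_group = config['configurations']['hadoop-env']['user_group']
print(falcon_user, user_group)

The metainfo.xml changes below mirror the same move: each service's configuration-dependencies list drops the global config-type and declares its own *-env type instead.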


http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/configuration/falcon-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/configuration/falcon-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/configuration/falcon-env.xml
new file mode 100644
index 0000000..fadc02d
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/configuration/falcon-env.xml
@@ -0,0 +1,63 @@
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false">
+  <property>
+    <name>falcon_user</name>
+    <value>falcon</value>
+    <description>Falcon user.</description>
+  </property>
+  <property>
+    <name>falcon_port</name>
+    <value>15000</value>
+    <description>Port the Falcon Server listens on.</description>
+  </property>
+  <property>
+    <name>falcon_log_dir</name>
+    <value>/var/log/falcon</value>
+    <description>Falcon log directory.</description>
+  </property>
+  <property>
+    <name>falcon_pid_dir</name>
+    <value>/var/run/falcon</value>
+    <description>Falcon pid-file directory.</description>
+  </property>
+  <property>
+    <name>falcon_local_dir</name>
+    <value>/hadoop/falcon</value>
+    <description>Directory where Falcon data, such as activemq data, is stored.</description>
+  </property>
+  <!--embeddedmq properties-->
+  <property>
+    <name>falcon.embeddedmq.data</name>
+    <value>/hadoop/falcon/embeddedmq/data</value>
+    <description>Directory in which embeddedmq data is stored.</description>
+  </property>
+  <property>
+    <name>falcon.embeddedmq</name>
+    <value>true</value>
+    <description>Whether embeddedmq is enabled or not.</description>
+  </property>
+  <property>
+    <name>falcon.emeddedmq.port</name>
+    <value>61616</value>
+    <description>Port that embeddedmq will listen on.</description>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/metainfo.xml
index aa243e3..f66d99f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/metainfo.xml
@@ -80,7 +80,7 @@
 
       <configuration-dependencies>
         <config-type>oozie-site</config-type>
-        <config-type>global</config-type>
+        <config-type>falcon-env</config-type>
         <config-type>falcon-startup.properties</config-type>
         <config-type>falcon-runtime.properties</config-type>
       </configuration-dependencies>

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/params.py
index 3372675..93e292d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/params.py
@@ -23,29 +23,30 @@ from status_params import *
 
 config = Script.get_config()
 
-oozie_user = config['configurations']['global']['oozie_user']
-falcon_user = config['configurations']['global']['falcon_user']
-smoke_user =  config['configurations']['global']['smokeuser']
+oozie_user = config['configurations']['oozie-env']['oozie_user']
+falcon_user = config['configurations']['falcon-env']['falcon_user']
+smoke_user =  config['configurations']['hadoop-env']['smokeuser']
 
-user_group = config['configurations']['global']['user_group']
-proxyuser_group =  config['configurations']['global']['proxyuser_group']
+user_group = config['configurations']['hadoop-env']['user_group']
+proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']
 
 java_home = config['hostLevelParams']['java_home']
 falcon_home = '/usr/lib/falcon'
 falcon_conf_dir = '/etc/falcon/conf'
-falcon_local_dir = config['configurations']['global']['falcon_local_dir']
-falcon_log_dir = config['configurations']['global']['falcon_log_dir']
+falcon_local_dir = config['configurations']['falcon-env']['falcon_local_dir']
+falcon_log_dir = config['configurations']['falcon-env']['falcon_log_dir']
 store_uri = config['configurations']['falcon-startup.properties']['*.config.store.uri']
 
-falcon_embeddedmq_data = config['configurations']['global']['falcon.embeddedmq.data']
-falcon_embeddedmq_enabled = config['configurations']['global']['falcon.embeddedmq']
-falcon_emeddedmq_port = config['configurations']['global']['falcon.emeddedmq.port']
+falcon_embeddedmq_data = config['configurations']['falcon-env']['falcon.embeddedmq.data']
+falcon_embeddedmq_enabled = config['configurations']['falcon-env']['falcon.embeddedmq']
+falcon_emeddedmq_port = config['configurations']['falcon-env']['falcon.emeddedmq.port']
 
 falcon_host = config['clusterHostInfo']['falcon_server_hosts'][0]
-falcon_port = config['configurations']['global']['falcon_port']
+falcon_port = config['configurations']['falcon-env']['falcon_port']
 falcon_runtime_properties = config['configurations']['falcon-runtime.properties']
 falcon_startup_properties = config['configurations']['falcon-startup.properties']
-smokeuser_keytab = config['configurations']['global']['smokeuser_keytab']
+smokeuser_keytab = config['configurations']['hadoop-env']['smokeuser_keytab']
+falcon_env_sh_template = config['configurations']['falcon-env']['content']
 
 falcon_webapp_dir = '/var/lib/falcon/webapp'
 flacon_apps_dir = '/apps/falcon'
@@ -54,10 +55,9 @@ _authentication = config['configurations']['core-site']['hadoop.security.authent
 security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
-hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
-hdfs_user = config['configurations']['global']['hdfs_user']
-hdfs_principal_name = config['configurations']['global']['hdfs_principal_name']
-kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/status_params.py
index 7f3aaa0..6ebb35f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/status_params.py
@@ -20,5 +20,5 @@ limitations under the License.
 from resource_management import *
 
 config = Script.get_config()
-falcon_pid_dir = config['configurations']['global']['falcon_pid_dir']
+falcon_pid_dir = config['configurations']['falcon-env']['falcon_pid_dir']
 server_pid_file = format('{falcon_pid_dir}/falcon.pid')

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/GLUSTERFS/configuration/core-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/GLUSTERFS/configuration/core-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/GLUSTERFS/configuration/core-site.xml
index ea158a5..b1529f2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/GLUSTERFS/configuration/core-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/GLUSTERFS/configuration/core-site.xml
@@ -22,35 +22,9 @@
 
 <configuration supports_final="true" xmlns:xi="http://www.w3.org/2001/XInclude">
 
-<!-- i/o properties -->
-
-  <property>
-    <name>hadoop_heapsize</name>
-    <value>1024</value>
-    <description>Hadoop maximum Java heap size</description>
-  </property>
-
-
 <!-- file system properties -->
 
   <property>
-    <name>fs.defaultFS</name>
-    <!-- cluster variant -->
-    <value>glusterfs:///localhost:8020</value>
-    <description>The name of the default file system.  Either the
-  literal string "local" or a host:port for NDFS.</description>
-    <final>true</final>
-  </property>
-  
-  <property>
-    <name>fs.default.name</name>
-    <!-- cluster variant -->
-    <value>glusterfs:///localhost:8020</value>
-    <description>The name of the default file system.  Either the
-         literal string "local" or a host:port for NDFS.</description>
-  </property>
-
-  <property>
   <name>fs.AbstractFileSystem.glusterfs.impl</name>
   <value>org.apache.hadoop.fs.local.GlusterFs</value>
   </property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/GLUSTERFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/GLUSTERFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/GLUSTERFS/configuration/hadoop-env.xml
new file mode 100644
index 0000000..6b00199
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/GLUSTERFS/configuration/hadoop-env.xml
@@ -0,0 +1,207 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration supports_final="false">
+  <property>
+    <name>fs.defaultFS</name>
+    <!-- cluster variant -->
+    <value>glusterfs:///localhost:8020</value>
+    <description>The name of the default file system.  Either the
+  literal string "local" or a host:port for NDFS.</description>
+    <final>true</final>
+  </property>
+  <property>
+    <name>fs.default.name</name>
+    <!-- cluster variant -->
+    <value>glusterfs:///localhost:8020</value>
+    <description>The name of the default file system.  Either the
+         literal string "local" or a host:port for NDFS.</description>
+  </property>
+  <property>
+    <name>hadoop_pid_dir_prefix</name>
+    <value>/var/run/hadoop</value>
+    <description>Hadoop PID Dir Prefix</description>
+  </property>
+ <property>
+    <name>hadoop_heapsize</name>
+    <value>1024</value>
+    <description>Hadoop maximum Java heap size</description>
+  </property>
+  <property>
+    <name>glusterfs_user</name>
+    <value>root</value>
+    <description></description>
+  </property>
+  <property>
+    <name>hdfs_log_dir_prefix</name>
+    <value>/var/log/hadoop</value>
+    <description>Hadoop Log Dir Prefix</description>
+  </property>
+  <property>
+    <name>namenode_heapsize</name>
+    <value>1024</value>
+    <description>NameNode Java heap size</description>
+  </property>
+  <property>
+    <name>namenode_host</name>
+    <value></value>
+    <description>NameNode Host.</description>
+  </property>
+  <property>
+    <name>snamenode_host</name>
+    <value></value>
+    <description>Secondary NameNode.</description>
+  </property>
+  <property>
+    <name>proxyuser_group</name>
+    <value>users</value>
+    <description>Proxy user group.</description>
+  </property>
+  <property>
+    <name>hdfs_user</name>
+    <value>hdfs</value>
+    <description>User to run HDFS as</description>
+  </property>
+  <property>
+    <name>user_group</name>
+    <value>hadoop</value>
+    <description>Proxy user group.</description>
+  </property>
+    <!-- hadoop-env.sh -->
+  <property>
+    <name>content</name>
+    <description>This is the jinja template for hadoop-env.sh file</description>
+    <value>
+# Set Hadoop-specific environment variables here.
+
+# The only required environment variable is JAVA_HOME.  All others are
+# optional.  When running a distributed configuration it is best to
+# set JAVA_HOME in this file, so that it is correctly defined on
+# remote nodes.
+
+# The java implementation to use.  Required.
+export JAVA_HOME={{java_home}}
+export HADOOP_HOME_WARN_SUPPRESS=1
+
+# Hadoop home directory
+export HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}
+
+# Hadoop Configuration Directory
+#TODO: if env var set that can cause problems
+export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}
+
+{# this is different for HDP1 #}
+# Path to jsvc required by secure HDP 2.0 datanode
+export JSVC_HOME={{jsvc_path}}
+
+
+# The maximum amount of heap to use, in MB. Default is 1000.
+export HADOOP_HEAPSIZE="{{hadoop_heapsize}}"
+
+export HADOOP_NAMENODE_INIT_HEAPSIZE="-Xms{{namenode_heapsize}}"
+
+# Extra Java runtime options.  Empty by default.
+export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true ${HADOOP_OPTS}"
+
+# Command specific options appended to HADOOP_OPTS when specified
+export HADOOP_NAMENODE_OPTS="-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xms{{namenode_heapsize}} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_NAMENODE_OPTS}"
+HADOOP_JOBTRACKER_OPTS="-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{jtnode_opt_newsize}} -XX:MaxNewSize={{jtnode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xmx{{jtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT -Dhadoop.mapreduce.jobsummary.logger=INFO,JSA ${HADOOP_JOBTRACKER_OPTS}"
+
+HADOOP_TASKTRACKER_OPTS="-server -Xmx{{ttnode_heapsize}} -Dhadoop.security.logger=ERROR,console -Dmapred.audit.logger=ERROR,console ${HADOOP_TASKTRACKER_OPTS}"
+HADOOP_DATANODE_OPTS="-Xmx{{dtnode_heapsize}} -Dhadoop.security.logger=ERROR,DRFAS ${HADOOP_DATANODE_OPTS}"
+HADOOP_BALANCER_OPTS="-server -Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}"
+
+export HADOOP_SECONDARYNAMENODE_OPTS="-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps ${HADOOP_NAMENODE_INIT_HEAPSIZE} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_SECONDARYNAMENODE_OPTS}"
+
+# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
+export HADOOP_CLIENT_OPTS="-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS"
+# On secure datanodes, user to run the datanode as after dropping privileges
+export HADOOP_SECURE_DN_USER={{hdfs_user}}
+
+# Extra ssh options.  Empty by default.
+export HADOOP_SSH_OPTS="-o ConnectTimeout=5 -o SendEnv=HADOOP_CONF_DIR"
+
+# Where log files are stored.  $HADOOP_HOME/logs by default.
+export HADOOP_LOG_DIR={{hdfs_log_dir_prefix}}/$USER
+
+# History server logs
+export HADOOP_MAPRED_LOG_DIR={{mapred_log_dir_prefix}}/$USER
+
+# Where log files are stored in the secure data environment.
+export HADOOP_SECURE_DN_LOG_DIR={{hdfs_log_dir_prefix}}/$HADOOP_SECURE_DN_USER
+
+# File naming remote slave hosts.  $HADOOP_HOME/conf/slaves by default.
+# export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves
+
+# host:path where hadoop code should be rsync'd from.  Unset by default.
+# export HADOOP_MASTER=master:/home/$USER/src/hadoop
+
+# Seconds to sleep between slave commands.  Unset by default.  This
+# can be useful in large clusters, where, e.g., slave rsyncs can
+# otherwise arrive faster than the master can service them.
+# export HADOOP_SLAVE_SLEEP=0.1
+
+# The directory where pid files are stored. /tmp by default.
+export HADOOP_PID_DIR={{hadoop_pid_dir_prefix}}/$USER
+export HADOOP_SECURE_DN_PID_DIR={{hadoop_pid_dir_prefix}}/$HADOOP_SECURE_DN_USER
+
+# History server pid
+export HADOOP_MAPRED_PID_DIR={{mapred_pid_dir_prefix}}/$USER
+
+YARN_RESOURCEMANAGER_OPTS="-Dyarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY"
+
+# A string representing this instance of hadoop. $USER by default.
+export HADOOP_IDENT_STRING=$USER
+
+# The scheduling priority for daemon processes.  See 'man nice'.
+
+# export HADOOP_NICENESS=10
+
+# Use libraries from standard classpath
+JAVA_JDBC_LIBS=""
+#Add libraries required by mysql connector
+for jarFile in `ls /usr/share/java/*mysql* 2>/dev/null`
+do
+  JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile
+done
+#Add libraries required by oracle connector
+for jarFile in `ls /usr/share/java/*ojdbc* 2>/dev/null`
+do
+  JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile
+done
+#Add libraries required by nodemanager
+MAPREDUCE_LIBS={{mapreduce_libs_path}}
+export HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}:${MAPREDUCE_LIBS}
+
+if [ -d "/usr/lib/tez" ]; then
+  export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:/usr/lib/tez/*:/usr/lib/tez/lib/*:/etc/tez/conf
+fi
+
+# Setting path to hdfs command line
+export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
+
+#Mostly required for hadoop 2.0
+export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/lib/hadoop/lib/native/Linux-amd64-64
+    </value>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/GLUSTERFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/GLUSTERFS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/GLUSTERFS/metainfo.xml
index 8908401..6b01ed0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/GLUSTERFS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/GLUSTERFS/metainfo.xml
@@ -53,12 +53,9 @@
       </commandScript>
 
       <configuration-dependencies>
-      <!--
-        <config-type>yarn-site</config-type>
-        <config-type>mapred-site</config-type>
-      -->
         <config-type>core-site</config-type>
-        <config-type>global</config-type>
+        <config-type>hadoop-env</config-type>
+        <!--<config-type>hdfs-site</config-type>-->
       </configuration-dependencies>
 
     </service>

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/OOZIE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/OOZIE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/OOZIE/metainfo.xml
index 186676f..6a8b98e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/OOZIE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/OOZIE/metainfo.xml
@@ -69,7 +69,7 @@
         
       </osSpecifics>
       <configuration-dependencies>
-        <config-type>global</config-type>
+        <config-type>oozie-env</config-type>
         <config-type>oozie-site</config-type>
         <config-type>oozie-log4j</config-type>
       </configuration-dependencies>

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/STORM/configuration/storm-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/STORM/configuration/storm-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/STORM/configuration/storm-env.xml
new file mode 100644
index 0000000..fc65cfa
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/STORM/configuration/storm-env.xml
@@ -0,0 +1,39 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration supports_final="false">
+  <property>
+    <name>storm_user</name>
+    <value>storm</value>
+    <description></description>
+  </property>
+  <property>
+    <name>storm_log_dir</name>
+    <value>/var/log/storm</value>
+    <description></description>
+  </property>
+  <property>
+    <name>storm_pid_dir</name>
+    <value>/var/run/storm</value>
+    <description></description>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/STORM/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/STORM/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/STORM/metainfo.xml
index d344295..14d8b56 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/STORM/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/STORM/metainfo.xml
@@ -109,7 +109,7 @@
 
       <configuration-dependencies>
         <config-type>storm-site</config-type>
-        <config-type>global</config-type>
+        <config-type>tez-env</config-type>
       </configuration-dependencies>
     </service>
   </services>

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/STORM/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/STORM/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/STORM/package/scripts/params.py
index fcc8fbc..def5120 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/STORM/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/STORM/package/scripts/params.py
@@ -24,12 +24,12 @@ import status_params
 # server configurations
 config = Script.get_config()
 
-storm_user = config['configurations']['global']['storm_user']
-log_dir = config['configurations']['global']['storm_log_dir']
+storm_user = config['configurations']['storm-env']['storm_user']
+log_dir = config['configurations']['storm-env']['storm_log_dir']
 pid_dir = status_params.pid_dir
 conf_dir = "/etc/storm/conf"
 local_dir = config['configurations']['storm-site']['storm.local.dir']
-user_group = config['configurations']['global']['user_group']
+user_group = config['configurations']['hadoop-env']['user_group']
 java64_home = config['hostLevelParams']['java_home']
 nimbus_host = config['configurations']['storm-site']['nimbus.host']
 nimbus_port = config['configurations']['storm-site']['nimbus.thrift.port']
@@ -53,7 +53,7 @@ security_enabled = ( not is_empty(_authentication) and _authentication == 'kerbe
 
 if security_enabled:
   _hostname_lowercase = config['hostname'].lower()
-  _kerberos_domain = config['configurations']['global']['kerberos_domain']
-  _storm_principal_name = config['configurations']['global']['storm_principal_name']
+  _kerberos_domain = config['configurations']['hadoop-env']['kerberos_domain']
+  _storm_principal_name = config['configurations']['storm-env']['storm_principal_name']
   storm_jaas_principal = _storm_principal_name.replace('_HOST',_hostname_lowercase)
-  storm_keytab_path = config['configurations']['global']['storm_keytab']
+  storm_keytab_path = config['configurations']['storm-env']['storm_keytab']

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/STORM/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/STORM/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/STORM/package/scripts/status_params.py
index 66b2a57..5eaa446 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/STORM/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/STORM/package/scripts/status_params.py
@@ -21,7 +21,7 @@ from resource_management import *
 
 config = Script.get_config()
 
-pid_dir = config['configurations']['global']['storm_pid_dir']
+pid_dir = config['configurations']['storm-env']['storm_pid_dir']
 pid_nimbus = format("{pid_dir}/nimbus.pid")
 pid_supervisor = format("{pid_dir}/supervisor.pid")
 pid_drpc = format("{pid_dir}/drpc.pid")

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/TEZ/configuration/tez-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/TEZ/configuration/tez-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/TEZ/configuration/tez-env.xml
new file mode 100644
index 0000000..5ae9df1
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/TEZ/configuration/tez-env.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration supports_final="false">
+  <property>
+    <name>tez_user</name>
+    <value>tez</value>
+    <description></description>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/TEZ/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/TEZ/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/TEZ/metainfo.xml
index ed9cfe3..410266e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/TEZ/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/TEZ/metainfo.xml
@@ -46,7 +46,7 @@
       </osSpecifics>
 
       <configuration-dependencies>
-        <config-type>global</config-type>
+        <config-type>tez-env</config-type>
         <config-type>tez-site</config-type>
       </configuration-dependencies>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/TEZ/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/TEZ/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/TEZ/package/scripts/params.py
index 8bc810a..38bcacf 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/TEZ/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/TEZ/package/scripts/params.py
@@ -28,5 +28,5 @@ config_dir = "/etc/tez/conf"
 hadoop_home = '/usr'
 java64_home = config['hostLevelParams']['java_home']
 
-tez_user = config['configurations']['global']['tez_user']
-user_group = config['configurations']['global']['user_group']
\ No newline at end of file
+tez_user = config['configurations']['tez-env']['tez_user']
+user_group = config['configurations']['hadoop-env']['user_group']
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/configuration/global.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/configuration/global.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/configuration/global.xml
deleted file mode 100644
index af6b3e3..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/configuration/global.xml
+++ /dev/null
@@ -1,64 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration supports_final="false">
-  <property>
-    <name>yarn_log_dir_prefix</name>
-    <value>/var/log/hadoop-yarn</value>
-    <description>YARN Log Dir Prefix</description>
-  </property>
-  <property>
-    <name>yarn_pid_dir_prefix</name>
-    <value>/var/run/hadoop-yarn</value>
-    <description>YARN PID Dir Prefix</description>
-  </property>
-  <property>
-    <name>yarn_user</name>
-    <value>yarn</value>
-    <description>YARN User</description>
-  </property>
-  <property>
-    <name>yarn_heapsize</name>
-    <value>1024</value>
-    <description>Max heapsize for all YARN components using a numerical value in the scale of MB</description>
-  </property>
-  <property>
-    <name>resourcemanager_heapsize</name>
-    <value>1024</value>
-    <description>Max heapsize for ResourceManager using a numerical value in the scale of MB</description>
-  </property>
-  <property>
-    <name>nodemanager_heapsize</name>
-    <value>1024</value>
-    <description>Max heapsize for NodeManager using a numerical value in the scale of MB</description>
-  </property>
-  <property>
-    <name>apptimelineserver_heapsize</name>
-    <value>1024</value>
-    <description>Max heapsize for AppTimelineServer using a numerical value in the scale of MB</description>
-  </property>
-  <property>
-    <name>namenode_heapsize</name>
-    <value>1024</value>
-    <description>Max heapsize for NameNode using a numerical value in the scale of MB</description>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/configuration/yarn-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/configuration/yarn-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/configuration/yarn-env.xml
new file mode 100644
index 0000000..60109c2
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/configuration/yarn-env.xml
@@ -0,0 +1,181 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration supports_final="false">
+  <property>
+    <name>yarn_log_dir_prefix</name>
+    <value>/var/log/hadoop-yarn</value>
+    <description>YARN Log Dir Prefix</description>
+  </property>
+  <property>
+    <name>yarn_pid_dir_prefix</name>
+    <value>/var/run/hadoop-yarn</value>
+    <description>YARN PID Dir Prefix</description>
+  </property>
+  <property>
+    <name>yarn_user</name>
+    <value>yarn</value>
+    <description>YARN User</description>
+  </property>
+  <property>
+    <name>yarn_heapsize</name>
+    <value>1024</value>
+    <description>Max heapsize for all YARN components using a numerical value in the scale of MB</description>
+  </property>
+  <property>
+    <name>resourcemanager_heapsize</name>
+    <value>1024</value>
+    <description>Max heapsize for ResourceManager using a numerical value in the scale of MB</description>
+  </property>
+  <property>
+    <name>nodemanager_heapsize</name>
+    <value>1024</value>
+    <description>Max heapsize for NodeManager using a numerical value in the scale of MB</description>
+  </property>
+  <property>
+    <name>apptimelineserver_heapsize</name>
+    <value>1024</value>
+    <description>Max heapsize for AppTimelineServer using a numerical value in the scale of MB</description>
+  </property>
+  <property>
+    <name>namenode_heapsize</name>
+    <value>1024</value>
+    <description>Max heapsize for NameNode using a numerical value in the scale of MB</description>
+  </property>
+  <!-- yarn-env.sh -->
+  <property>
+    <name>content</name>
+    <description>This is the jinja template for yarn-env.sh file</description>
+    <value>
+export HADOOP_YARN_HOME={{hadoop_yarn_home}}
+export YARN_LOG_DIR={{yarn_log_dir_prefix}}/$USER
+export YARN_PID_DIR={{yarn_pid_dir_prefix}}/$USER
+export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
+export JAVA_HOME={{java64_home}}
+
+# User for YARN daemons
+export HADOOP_YARN_USER=${HADOOP_YARN_USER:-yarn}
+
+# resolve links - $0 may be a softlink
+export YARN_CONF_DIR="${YARN_CONF_DIR:-$HADOOP_YARN_HOME/conf}"
+
+# some Java parameters
+# export JAVA_HOME=/home/y/libexec/jdk1.6.0/
+if [ "$JAVA_HOME" != "" ]; then
+  #echo "run java in $JAVA_HOME"
+  JAVA_HOME=$JAVA_HOME
+fi
+
+if [ "$JAVA_HOME" = "" ]; then
+  echo "Error: JAVA_HOME is not set."
+  exit 1
+fi
+
+JAVA=$JAVA_HOME/bin/java
+JAVA_HEAP_MAX=-Xmx1000m
+
+# For setting YARN specific HEAP sizes please use this
+# Parameter and set appropriately
+YARN_HEAPSIZE={{yarn_heapsize}}
+
+# check envvars which might override default args
+if [ "$YARN_HEAPSIZE" != "" ]; then
+  JAVA_HEAP_MAX="-Xmx""$YARN_HEAPSIZE""m"
+fi
+
+# Resource Manager specific parameters
+
+# Specify the max Heapsize for the ResourceManager using a numerical value
+# in the scale of MB. For example, to specify an jvm option of -Xmx1000m, set
+# the value to 1000.
+# This value will be overridden by an Xmx setting specified in either YARN_OPTS
+# and/or YARN_RESOURCEMANAGER_OPTS.
+# If not specified, the default value will be picked from either YARN_HEAPMAX
+# or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.
+export YARN_RESOURCEMANAGER_HEAPSIZE={{resourcemanager_heapsize}}
+
+# Specify the JVM options to be used when starting the ResourceManager.
+# These options will be appended to the options specified as YARN_OPTS
+# and therefore may override any similar flags set in YARN_OPTS
+#export YARN_RESOURCEMANAGER_OPTS=
+
+# Node Manager specific parameters
+
+# Specify the max Heapsize for the NodeManager using a numerical value
+# in the scale of MB. For example, to specify an jvm option of -Xmx1000m, set
+# the value to 1000.
+# This value will be overridden by an Xmx setting specified in either YARN_OPTS
+# and/or YARN_NODEMANAGER_OPTS.
+# If not specified, the default value will be picked from either YARN_HEAPMAX
+# or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.
+export YARN_NODEMANAGER_HEAPSIZE={{nodemanager_heapsize}}
+
+# Specify the max Heapsize for the HistoryManager using a numerical value
+# in the scale of MB. For example, to specify an jvm option of -Xmx1000m, set
+# the value to 1024.
+# This value will be overridden by an Xmx setting specified in either YARN_OPTS
+# and/or YARN_HISTORYSERVER_OPTS.
+# If not specified, the default value will be picked from either YARN_HEAPMAX
+# or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.
+export YARN_HISTORYSERVER_HEAPSIZE={{apptimelineserver_heapsize}}
+
+# Specify the JVM options to be used when starting the NodeManager.
+# These options will be appended to the options specified as YARN_OPTS
+# and therefore may override any similar flags set in YARN_OPTS
+#export YARN_NODEMANAGER_OPTS=
+
+# so that filenames w/ spaces are handled correctly in loops below
+IFS=
+
+
+# default log directory and file
+if [ "$YARN_LOG_DIR" = "" ]; then
+  YARN_LOG_DIR="$HADOOP_YARN_HOME/logs"
+fi
+if [ "$YARN_LOGFILE" = "" ]; then
+  YARN_LOGFILE='yarn.log'
+fi
+
+# default policy file for service-level authorization
+if [ "$YARN_POLICYFILE" = "" ]; then
+  YARN_POLICYFILE="hadoop-policy.xml"
+fi
+
+# restore ordinary behaviour
+unset IFS
+
+
+YARN_OPTS="$YARN_OPTS -Dhadoop.log.dir=$YARN_LOG_DIR"
+YARN_OPTS="$YARN_OPTS -Dyarn.log.dir=$YARN_LOG_DIR"
+YARN_OPTS="$YARN_OPTS -Dhadoop.log.file=$YARN_LOGFILE"
+YARN_OPTS="$YARN_OPTS -Dyarn.log.file=$YARN_LOGFILE"
+YARN_OPTS="$YARN_OPTS -Dyarn.home.dir=$YARN_COMMON_HOME"
+YARN_OPTS="$YARN_OPTS -Dyarn.id.str=$YARN_IDENT_STRING"
+YARN_OPTS="$YARN_OPTS -Dhadoop.root.logger=${YARN_ROOT_LOGGER:-INFO,console}"
+YARN_OPTS="$YARN_OPTS -Dyarn.root.logger=${YARN_ROOT_LOGGER:-INFO,console}"
+if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
+  YARN_OPTS="$YARN_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
+fi
+YARN_OPTS="$YARN_OPTS -Dyarn.policy.file=$YARN_POLICYFILE"
+    </value>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/metainfo.xml
index a26b02f..2173b69 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/metainfo.xml
@@ -123,8 +123,9 @@
 
       <configuration-dependencies>
         <config-type>core-site</config-type>
-        <config-type>global</config-type>
+        <config-type>yarn-env</config-type>
         <config-type>mapred-site</config-type>
+        <config-type>mapred-env</config-type>
         <config-type>mapred-queue-acls</config-type>
       </configuration-dependencies>
     </service>

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/params.py
index 1dda724..500727c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/params.py
@@ -30,12 +30,12 @@ config_dir = "/etc/hadoop/conf"
 
 mapred_user = status_params.mapred_user
 yarn_user = status_params.yarn_user
-hdfs_user = config['configurations']['global']['hdfs_user']
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 
-smokeuser = config['configurations']['global']['smokeuser']
+smokeuser = config['configurations']['hadoop-env']['smokeuser']
 _authentication = config['configurations']['core-site']['hadoop.security.authentication']
 security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
-smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
+smoke_user_keytab = config['configurations']['hadoop-env']['smokeuser_keytab']
 yarn_executor_container_group = config['configurations']['yarn-site']['yarn.nodemanager.linux-container-executor.group']
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 rm_hosts = config['clusterHostInfo']['rm_host']
@@ -49,14 +49,17 @@ hadoop_ssl_enabled = default("/configurations/core-site/hadoop.ssl.enabled", Fal
 
 hadoop_libexec_dir = '/usr/lib/hadoop/libexec'
 hadoop_yarn_home = '/usr/lib/hadoop-yarn'
-yarn_heapsize = config['configurations']['global']['yarn_heapsize']
-resourcemanager_heapsize = config['configurations']['global']['resourcemanager_heapsize']
-nodemanager_heapsize = config['configurations']['global']['nodemanager_heapsize']
-apptimelineserver_heapsize = default("/configurations/global/apptimelineserver_heapsize", 1024)
-yarn_log_dir_prefix = config['configurations']['global']['yarn_log_dir_prefix']
+yarn_heapsize = config['configurations']['yarn-env']['yarn_heapsize']
+resourcemanager_heapsize = config['configurations']['yarn-env']['resourcemanager_heapsize']
+nodemanager_heapsize = config['configurations']['yarn-env']['nodemanager_heapsize']
+apptimelineserver_heapsize = default("/configurations/yarn-env/apptimelineserver_heapsize", 1024)
+ats_leveldb_dir = config['configurations']['yarn-site']['yarn.timeline-service.leveldb-timeline-store.path']
+yarn_log_dir_prefix = config['configurations']['yarn-env']['yarn_log_dir_prefix']
 yarn_pid_dir_prefix = status_params.yarn_pid_dir_prefix
 mapred_pid_dir_prefix = status_params.mapred_pid_dir_prefix
-mapred_log_dir_prefix = config['configurations']['global']['mapred_log_dir_prefix']
+mapred_log_dir_prefix = config['configurations']['mapred-env']['mapred_log_dir_prefix']
+mapred_env_sh_template = config['configurations']['mapred-env']['content']
+yarn_env_sh_template = config['configurations']['yarn-env']['content']
 
 if len(rm_hosts) > 1:
   additional_rm_host = rm_hosts[1]
@@ -88,7 +91,7 @@ yarn_job_summary_log = format("{yarn_log_dir_prefix}/{yarn_user}/hadoop-mapreduc
 mapred_bin = "/usr/lib/hadoop-mapreduce/sbin"
 yarn_bin = "/usr/lib/hadoop-yarn/sbin"
 
-user_group = config['configurations']['global']['user_group']
+user_group = config['configurations']['hadoop-env']['user_group']
 limits_conf_dir = "/etc/security/limits.d"
 hadoop_conf_dir = "/etc/hadoop/conf"
 yarn_container_bin = "/usr/lib/hadoop-yarn/bin"
@@ -100,8 +103,8 @@ exclude_file_path = config['configurations']['yarn-site']['yarn.resourcemanager.
 hostname = config['hostname']
 
 if security_enabled:
-  nm_principal_name = config['configurations']['global']['nodemanager_principal_name']
-  nodemanager_keytab = config['configurations']['global']['nodemanager_keytab']
+  nm_principal_name = config['configurations']['yarn-site']['nodemanager_principal_name']
+  nodemanager_keytab = config['configurations']['yarn-site']['nodemanager_keytab']
   nodemanager_principal_name = nm_principal_name.replace('_HOST',hostname.lower())
   nm_kinit_cmd = format("{kinit_path_local} -kt {nodemanager_keytab} {nodemanager_principal_name};")
 else:
@@ -115,9 +118,8 @@ mapreduce_jobhistory_done_dir = config['configurations']['mapred-site']['mapredu
 #for create_hdfs_directory
 hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
-hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
-hdfs_user = config['configurations']['global']['hdfs_user']
-hdfs_principal_name = config['configurations']['global']['hdfs_principal_name']
+hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
@@ -131,3 +133,12 @@ HdfsDirectory = functools.partial(
   kinit_path_local = kinit_path_local
 )
 update_exclude_file_only = config['commandParams']['update_exclude_file_only']
+
+hadoop_bin = "/usr/lib/hadoop/sbin"
+mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
+
+#taskcontroller.cfg
+
+mapred_local_dir = "/tmp/hadoop-mapred/mapred/local"
+hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+min_user_id = config['configurations']['yarn-env']['min_user_id']

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/status_params.py
index 1a67de7..a3a45be 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/status_params.py
@@ -22,10 +22,10 @@ from resource_management import *
 
 config = Script.get_config()
 
-mapred_user = config['configurations']['global']['mapred_user']
-yarn_user = config['configurations']['global']['yarn_user']
-yarn_pid_dir_prefix = config['configurations']['global']['yarn_pid_dir_prefix']
-mapred_pid_dir_prefix = config['configurations']['global']['mapred_pid_dir_prefix']
+mapred_user = config['configurations']['mapred-env']['mapred_user']
+yarn_user = config['configurations']['yarn-env']['yarn_user']
+yarn_pid_dir_prefix = config['configurations']['yarn-env']['yarn_pid_dir_prefix']
+mapred_pid_dir_prefix = config['configurations']['mapred-env']['mapred_pid_dir_prefix']
 yarn_pid_dir = format("{yarn_pid_dir_prefix}/{yarn_user}")
 mapred_pid_dir = format("{mapred_pid_dir_prefix}/{mapred_user}")
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-web/app/data/HDP2/site_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2/site_properties.js b/ambari-web/app/data/HDP2/site_properties.js
index a4a7d6f..3ba0246 100644
--- a/ambari-web/app/data/HDP2/site_properties.js
+++ b/ambari-web/app/data/HDP2/site_properties.js
@@ -1597,45 +1597,6 @@ module.exports =
       "serviceName": "PIG",
       "filename": "pig-properties.xml",
       "category": "Advanced pig-properties"
-    },
-
-    //***************************************** GLUSTERFS stack********************************************
-
-    {
-      "id": "site property",
-      "name": "fs.glusterfs.impl",
-      "displayName": "GlusterFS fs impl",
-      "displayType": "string",
-      "filename": "core-site.xml",
-      "serviceName": "GLUSTERFS",
-      "category": "General"
-    },
-    {
-      "id": "site property",
-      "name": "fs.AbstractFileSystem.glusterfs.impl",
-      "displayName": "Abstract File System Implementation",
-      "displayType": "string",
-      "filename": "core-site.xml",
-      "serviceName": "GLUSTERFS",
-      "category": "General"
-    },
-    {
-      "id": "site property",
-      "name": "fs.glusterfs.volumes",
-      "displayName": "Gluster volume name(s)",
-      "displayType": "string",
-      "filename": "core-site.xml",
-      "serviceName": "GLUSTERFS",
-      "category": "General"
-    },
-    {
-      "id": "site property",
-      "name": "fs.glusterfs.volume.fuse.gv0",
-      "displayName": "Gluster mount point for volume",
-      "displayType": "string",
-      "filename": "core-site.xml",
-      "serviceName": "GLUSTERFS",
-      "category": "General"
     }, 
   /********************************************* flume-agent *****************************/
     {
@@ -2131,32 +2092,67 @@ module.exports =
       "filename": "hbase-env.xml",
       "category": "Advanced hbase-env"
     },
+     //***************************************** GLUSTERFS stack********************************************
+    {
+      "id": "site property",
+      "name": "fs.glusterfs.impl",
+      "displayName": "GlusterFS fs impl",
+      "displayType": "string",
+      "filename": "core-site.xml",
+      "serviceName": "GLUSTERFS",
+      "category": "General"
+    },
+    {
+      "id": "site property",
+      "name": "fs.AbstractFileSystem.glusterfs.impl",
+      "displayName": "GlusterFS Abstract File System Implementation",
+      "displayType": "string",
+      "filename": "core-site.xml",
+      "serviceName": "GLUSTERFS",
+      "category": "General"
+    },
+    {
+      "id": "site property",
+      "name": "fs.glusterfs.volumes",
+      "displayName": "Gluster volume name(s)",
+      "displayType": "string",
+      "filename": "core-site.xml",
+      "serviceName": "GLUSTERFS",
+      "category": "General"
+    },
+    {
+      "id": "site property",
+      "name": "fs.glusterfs.volume.fuse.gv0",
+      "displayName": "Gluster mount point for volume",
+      "displayType": "string",
+      "filename": "core-site.xml",
+      "serviceName": "GLUSTERFS",
+      "category": "General"
+    },
   /**********************************************GLUSTERFS***************************************/
     {
       "id": "puppet var",
-      "name": "glusterfs_defaultFS_name",
-      "displayName": "GlusterFS default fs name",
-      "description": "GlusterFS default filesystem name (glusterfs:///)",
+      "name": "fs_glusterfs_default_name",
+      "displayName": "GlusterFS default fs name 1.x Hadoop",
+      "description": "GlusterFS default filesystem name (glusterfs://{MasterFQDN}:9000)",
       "defaultValue": "glusterfs:///localhost:8020",
       "displayType": "string",
       "isVisible": true,
-      "domain": "global",
       "serviceName": "GLUSTERFS",
-      "filename": "glusterfs-env.xml",
-      "category": "General"
+      "filename": "hadoop-env.xml",
+      "category": "General Hadoop"
     },
     {
       "id": "puppet var",
-      "name": "fs_glusterfs_default_name",
-      "displayName": "GlusterFS default fs name",
+      "name": "glusterfs_defaultFS_name",
+      "displayName": "GlusterFS default fs name 2.x Hadoop",
       "description": "GlusterFS default filesystem name (glusterfs:///)",
       "defaultValue": "glusterfs:///localhost:8020",
       "displayType": "string",
       "isVisible": true,
-      "domain": "global",
       "serviceName": "GLUSTERFS",
-      "filename": "glusterfs-env.xml",
-      "category": "General"
+      "filename": "hadoop-env.xml",
+      "category": "General Hadoop"
     },
     {
       "id": "puppet var",
@@ -2167,10 +2163,10 @@ module.exports =
       "displayType": "int",
       "unit": "MB",
       "isVisible": true,
-      "domain": "global",
       "serviceName": "GLUSTERFS",
-      "filename": "glusterfs-env.xml",
-      "category": "General"
+      "filename": "hadoop-env.xml",
+      "category": "General Hadoop",
+      "index": 1
     },
     {
       "id": "puppet var",
@@ -2181,10 +2177,10 @@ module.exports =
       "isReconfigurable": false,
       "displayType": "directory",
       "isOverridable": false,
-      "isVisible": true,
+      "isVisible": false,
       "serviceName": "GLUSTERFS",
-      "filename": "glusterfs-env.xml",
-      "category": "Advanced"
+      "filename": "hadoop-env.xml",
+      "category": "General Hadoop"
     },
     {
       "id": "puppet var",
@@ -2195,10 +2191,10 @@ module.exports =
       "isReconfigurable": false,
       "displayType": "directory",
       "isOverridable": false,
-      "isVisible": true,
+      "isVisible": false,
       "serviceName": "GLUSTERFS",
-      "filename": "glusterfs-env.xml",
-      "category": "Advanced"
+      "filename": "hadoop-env.xml",
+      "category": "General Hadoop"
     },
     {
       "id": "puppet var",
@@ -2211,8 +2207,8 @@ module.exports =
       "isOverridable": false,
       "isVisible": false,
       "serviceName": "GLUSTERFS",
-      "filename": "glusterfs-env.xml",
-      "category": "Advanced"
+      "filename": "hadoop-env.xml",
+      "category": "General Hadoop"
     },
     {
       "id": "puppet var",
@@ -2225,8 +2221,8 @@ module.exports =
       "isOverridable": false,
       "isVisible": false,
       "serviceName": "GLUSTERFS",
-      "filename": "glusterfs-env.xml",
-      "category": "Advanced"
+      "filename": "hadoop-env.xml",
+      "category": "General Hadoop"
     },
     {
       "id": "puppet var",
@@ -2239,8 +2235,8 @@ module.exports =
       "isOverridable": false,
       "isVisible": false,
       "serviceName": "GLUSTERFS",
-      "filename": "glusterfs-env.xml",
-      "category": "Advanced"
+      "filename": "hadoop-env.xml",
+      "category": "General Hadoop"
     },
     {
       "id": "puppet var",
@@ -2252,8 +2248,56 @@ module.exports =
       "unit": "MB",
       "isVisible": false,
       "serviceName": "GLUSTERFS",
-      "filename": "glusterfs-env.xml",
-      "category": "Advanced"
+      "filename": "hadoop-env.xml",
+      "category": "General Hadoop"
+    },
+    {
+      "id": "puppet var",
+      "name": "glusterfs_user",
+      "displayName": "glusterfs user",
+      "description": "glusterfs user",
+      "defaultValue": "root",
+      "displayType": "string",
+      "isVisible": false,
+      "serviceName": "GLUSTERFS",
+      "filename": "hadoop-env.xml",
+      "category": "General Hadoop"
+    },
+    {
+      "id": "puppet var",
+      "name": "namenode_host",
+      "displayName": "NameNode Host",
+      "description": "NameNode Host.",
+      "defaultValue": "",
+      "displayType": "string",
+      "isVisible": false,
+      "serviceName": "GLUSTERFS",
+      "filename": "hadoop-env.xml",
+      "category": "General Hadoop"
+    },
+    {
+      "id": "puppet var",
+      "name": "snamenode_host",
+      "displayName": "Secondary NameNode Host",
+      "description": "Secondary NameNode Host.",
+      "defaultValue": "",
+      "displayType": "string",
+      "isVisible": false,
+      "serviceName": "GLUSTERFS",
+      "filename": "hadoop-env.xml",
+      "category": "General Hadoop"
+    },
+    {
+      "id": "puppet var",
+      "name": "content",
+      "displayName": "Hadoop Environment Template",
+      "description": "Hadoop Environment Template.",
+      "defaultValue": "",
+      "displayType": "string",
+      "isVisible": false,
+      "serviceName": "GLUSTERFS",
+      "filename": "hadoop-env.xml",
+      "category": "General Hadoop"
     },
   /**********************************************HIVE***************************************/
     {

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-web/app/data/site_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/site_properties.js b/ambari-web/app/data/site_properties.js
index af85b91..1f6bb63 100644
--- a/ambari-web/app/data/site_properties.js
+++ b/ambari-web/app/data/site_properties.js
@@ -799,43 +799,6 @@ module.exports =
       "filename": "pig-properties.xml",
       "category": "Advanced pig-properties"
     },
-    //***************************************** GLUSTERFS stack********************************************
-    {
-      "id": "site property",
-      "name": "fs.glusterfs.impl",
-      "displayName": "GlusterFS fs impl",
-      "displayType": "string",
-      "filename": "core-site.xml",
-      "serviceName": "GLUSTERFS",
-      "category": "General"
-    },
-    {
-      "id": "site property",
-      "name": "fs.AbstractFileSystem.glusterfs.impl",
-      "displayName": "Abstract File System Implementation",
-      "displayType": "string",
-      "filename": "core-site.xml",
-      "serviceName": "GLUSTERFS",
-      "category": "General"
-    },
-    {
-      "id": "site property",
-      "name": "fs.glusterfs.volumes",
-      "displayName": "Gluster volume name(s)",
-      "displayType": "string",
-      "filename": "core-site.xml",
-      "serviceName": "GLUSTERFS",
-      "category": "General"
-    },
-    {
-      "id": "site property",
-      "name": "fs.glusterfs.volume.fuse.gv0",
-      "displayName": "Gluster mount point for volume",
-      "displayType": "string",
-      "filename": "core-site.xml",
-      "serviceName": "GLUSTERFS",
-      "category": "General"
-    },
   /**********************************************HDFS***************************************/
     {
       "id": "puppet var",
@@ -1015,7 +978,43 @@ module.exports =
       "filename": "hadoop-env.xml",
       "category": "NAMENODE"
     },
-
+    /***************************************** GLUSTERFS stack********************************************/
+    {
+      "id": "site property",
+      "name": "fs.glusterfs.impl",
+      "displayName": "GlusterFS fs impl",
+      "displayType": "string",
+      "filename": "core-site.xml",
+      "serviceName": "GLUSTERFS",
+      "category": "General"
+    },
+    {
+      "id": "site property",
+      "name": "fs.AbstractFileSystem.glusterfs.impl",
+      "displayName": "GlusterFS Abstract File System Implementation",
+      "displayType": "string",
+      "filename": "core-site.xml",
+      "serviceName": "GLUSTERFS",
+      "category": "General"
+    },
+    {
+      "id": "site property",
+      "name": "fs.glusterfs.volumes",
+      "displayName": "Gluster volume name(s)",
+      "displayType": "string",
+      "filename": "core-site.xml",
+      "serviceName": "GLUSTERFS",
+      "category": "General"
+    },
+    {
+      "id": "site property",
+      "name": "fs.glusterfs.volume.fuse.gv0",
+      "displayName": "Gluster mount point for volume",
+      "displayType": "string",
+      "filename": "core-site.xml",
+      "serviceName": "GLUSTERFS",
+      "category": "General"
+    },
   /**********************************************GLUSTERFS***************************************/
     {
       "id": "puppet var",
@@ -1025,10 +1024,9 @@ module.exports =
       "defaultValue": "glusterfs:///localhost:8020",
       "displayType": "string",
       "isVisible": true,
-      "domain": "global",
       "serviceName": "GLUSTERFS",
       "filename": "hadoop-env.xml",
-      "category": "General"
+      "category": "General Hadoop"
     },
     {
       "id": "puppet var",
@@ -1038,23 +1036,9 @@ module.exports =
       "defaultValue": "glusterfs:///localhost:8020",
       "displayType": "string",
       "isVisible": true,
-      "domain": "global",
-      "serviceName": "GLUSTERFS",
-      "filename": "hadoop-env.xml",
-      "category": "General"
-    },
-    {
-      "id": "puppet var",
-      "name": "fs_AbstractFileSystem_glusterfs_impl",
-      "displayName": "GlusterFS Abstract Filesystem declaration",
-      "description": "GlusterFS Abstract Filesystem declaration",
-      "defaultValue": "org.apache.hadoop.fs.local.GlusterFs",
-      "displayType": "string",
-      "isVisible": true,
-      "domain": "global",
       "serviceName": "GLUSTERFS",
       "filename": "hadoop-env.xml",
-      "category": "General"
+      "category": "General Hadoop"
     },
     {
       "id": "puppet var",
@@ -1067,7 +1051,7 @@ module.exports =
       "isVisible": true,
       "serviceName": "GLUSTERFS",
       "filename": "hadoop-env.xml",
-      "category": "General",
+      "category": "General Hadoop",
       "index": 1
     },
     {
@@ -1079,10 +1063,10 @@ module.exports =
       "isReconfigurable": false,
       "displayType": "directory",
       "isOverridable": false,
-      "isVisible": true,
+      "isVisible": false,
       "serviceName": "GLUSTERFS",
       "filename": "hadoop-env.xml",
-      "category": "Advanced hadoop-env"
+      "category": "General Hadoop"
     },
     {
       "id": "puppet var",
@@ -1093,10 +1077,10 @@ module.exports =
       "isReconfigurable": false,
       "displayType": "directory",
       "isOverridable": false,
-      "isVisible": true,
+      "isVisible": false,
       "serviceName": "GLUSTERFS",
       "filename": "hadoop-env.xml",
-      "category": "Advanced hadoop-env"
+      "category": "General Hadoop"
     },
     {
       "id": "puppet var",
@@ -1110,7 +1094,7 @@ module.exports =
       "isVisible": false,
       "serviceName": "GLUSTERFS",
       "filename": "hadoop-env.xml",
-      "category": "Advanced hadoop-env"
+      "category": "General Hadoop"
     },
     {
       "id": "puppet var",
@@ -1124,7 +1108,7 @@ module.exports =
       "isVisible": false,
       "serviceName": "GLUSTERFS",
       "filename": "hadoop-env.xml",
-      "category": "Advanced hadoop-env"
+      "category": "General Hadoop"
     },
     {
       "id": "puppet var",
@@ -1138,7 +1122,7 @@ module.exports =
       "isVisible": false,
       "serviceName": "GLUSTERFS",
       "filename": "hadoop-env.xml",
-      "category": "Advanced hadoop-env"
+      "category": "General Hadoop"
     },
     {
       "id": "puppet var",
@@ -1152,7 +1136,55 @@ module.exports =
       "domain": "datanode-global",
       "serviceName": "GLUSTERFS",
       "filename": "hadoop-env.xml",
-      "category": "Advanced hadoop-env"
+      "category": "General Hadoop"
+    },
+    {
+      "id": "puppet var",
+      "name": "glusterfs_user",
+      "displayName": "glusterfs user",
+      "description": "glusterfs user",
+      "defaultValue": "root",
+      "displayType": "string",
+      "isVisible": false,
+      "serviceName": "GLUSTERFS",
+      "filename": "hadoop-env.xml",
+      "category": "General Hadoop"
+    },
+    {
+      "id": "puppet var",
+      "name": "namenode_host",
+      "displayName": "NameNode Host",
+      "description": "NameNode Host.",
+      "defaultValue": "",
+      "displayType": "string",
+      "isVisible": false,
+      "serviceName": "GLUSTERFS",
+      "filename": "hadoop-env.xml",
+      "category": "General Hadoop"
+    },
+    {
+      "id": "puppet var",
+      "name": "snamenode_host",
+      "displayName": "Secondary NameNode Host",
+      "description": "Secondary NameNode Host.",
+      "defaultValue": "",
+      "displayType": "string",
+      "isVisible": false,
+      "serviceName": "GLUSTERFS",
+      "filename": "hadoop-env.xml",
+      "category": "General Hadoop"
+    },
+    {
+      "id": "puppet var",
+      "name": "content",
+      "displayName": "Hadoop Environment Template",
+      "description": "Hadoop Environment Template.",
+      "defaultValue": "",
+      "displayType": "string",
+      "isVisible": false,
+      "serviceName": "GLUSTERFS",
+      "filename": "hadoop-env.xml",
+      "category": "General Hadoop"
     },
   /**********************************************MAPREDUCE***************************************/
     {

http://git-wip-us.apache.org/repos/asf/ambari/blob/fefc129b/ambari-web/app/models/stack_service.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/stack_service.js b/ambari-web/app/models/stack_service.js
index e5535eb..744083b 100644
--- a/ambari-web/app/models/stack_service.js
+++ b/ambari-web/app/models/stack_service.js
@@ -63,7 +63,7 @@ App.StackService = DS.Model.extend(App.ServiceModelMixin, {
 
   configTypesRendered: function () {
     var configTypes = this.get('configTypes');
-    if (this.get('serviceName') == 'HDFS') return configTypes;
+    if (this.get('serviceName') == 'HDFS' || this.get('serviceName') == 'GLUSTERFS') return configTypes;
     else {
       var renderedConfigTypes = $.extend(true, {}, configTypes);
       delete renderedConfigTypes['core-site'];


[19/35] git commit: AMBARI-7015 Config History: API should create exactly one SCV when instructed to save multiple configs in a batch (dsen)

Posted by jo...@apache.org.

AMBARI-7015 Config History: API should create exactly one SCV when instructed to save multiple configs in a batch (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2cceee2e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2cceee2e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2cceee2e

Branch: refs/heads/branch-alerts-dev
Commit: 2cceee2ea3933b05cef97e6d48f029dcebe9c200
Parents: d566bca
Author: Dmytro Sen <ds...@hortonworks.com>
Authored: Tue Aug 26 20:10:18 2014 +0300
Committer: Dmytro Sen <ds...@hortonworks.com>
Committed: Tue Aug 26 20:10:18 2014 +0300

----------------------------------------------------------------------
 .../AmbariManagementControllerImpl.java         | 38 ++++----
 .../server/controller/ClusterRequest.java       | 16 ++--
 .../ambari/server/controller/HostRequest.java   | 11 +--
 .../internal/AbstractResourceProvider.java      | 92 +++++++++++++-------
 .../internal/ClusterResourceProvider.java       |  7 +-
 .../internal/HostResourceProvider.java          | 54 ++++++------
 .../org/apache/ambari/server/state/Cluster.java |  9 +-
 .../ambari/server/state/ConfigHelper.java       |  3 +-
 .../server/state/cluster/ClusterImpl.java       | 52 +++++++----
 .../server/upgrade/AbstractUpgradeCatalog.java  |  3 +-
 .../server/agent/TestHeartbeatMonitor.java      |  4 +-
 .../AmbariManagementControllerTest.java         | 76 ++++++++--------
 ...hYarnCapacitySchedulerReleaseConfigTest.java |  4 +-
 .../internal/ClusterResourceProviderTest.java   | 40 ++++-----
 .../internal/JMXHostProviderTest.java           | 10 +--
 .../ambari/server/state/ConfigHelperTest.java   |  6 +-
 .../server/state/cluster/ClusterTest.java       | 55 +++++++++---
 .../server/state/cluster/ClustersTest.java      |  3 +-
 .../svccomphost/ServiceComponentHostTest.java   |  3 +-
 .../server/upgrade/UpgradeCatalogTest.java      |  2 +-
 20 files changed, 293 insertions(+), 195 deletions(-)
----------------------------------------------------------------------
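
Not part of the commit, but for orientation: a minimal sketch of how a caller could exercise the new batched form through the management controller, so that one service config version (SCV) covers both config types. The controller instance, cluster name, version tags, and property values below are placeholders; only the classes and signatures are the ones touched by this patch.

  import java.util.Arrays;
  import java.util.Collections;
  import java.util.HashMap;
  import java.util.Map;

  import org.apache.ambari.server.controller.AmbariManagementController;
  import org.apache.ambari.server.controller.ClusterRequest;
  import org.apache.ambari.server.controller.ConfigurationRequest;

  // Hypothetical helper, not from the commit: pushes two HDFS config types in one batch.
  final class BatchConfigSketch {
    static void updateHdfsConfigs(AmbariManagementController controller, String clusterName)
        throws Exception {
      Map<String, String> hdfsSite = new HashMap<String, String>();
      hdfsSite.put("dfs.replication", "3");
      Map<String, String> hadoopEnv = new HashMap<String, String>();
      hadoopEnv.put("hdfs_user", "hdfs");

      ConfigurationRequest hdfsSiteCr =
          new ConfigurationRequest(clusterName, "hdfs-site", "version2", hdfsSite, null);
      ConfigurationRequest hadoopEnvCr =
          new ConfigurationRequest(clusterName, "hadoop-env", "version2", hadoopEnv, null);

      ClusterRequest request = new ClusterRequest(null, clusterName, null, null);
      // Both ConfigurationRequests ride in one ClusterRequest; before this change the
      // setter accepted only a single ConfigurationRequest, so batching was impossible.
      request.setDesiredConfig(Arrays.asList(hdfsSiteCr, hadoopEnvCr));
      controller.updateClusters(Collections.singleton(request), null);
    }
  }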


http://git-wip-us.apache.org/repos/asf/ambari/blob/2cceee2e/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index c465189..394f6ad 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -53,6 +53,7 @@ import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedHashSet;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -676,7 +677,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
         if (config != null) {
           String authName = getAuthName();
 
-          if (cluster.addDesiredConfig(authName, config) != null) {
+          if (cluster.addDesiredConfig(authName, Collections.singleton(config)) != null) {
             LOG.info("cluster '" + cluster.getClusterName() + "' "
                     + "changed by: '" + authName + "'; "
                     + "type='" + config.getType() + "' "
@@ -1151,14 +1152,15 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
 
     final Cluster cluster = clusters.getCluster(request.getClusterName());
     //save data to return configurations created
-    ConfigurationResponse configurationResponse = null;
+    List<ConfigurationResponse> configurationResponses =
+      new LinkedList<ConfigurationResponse>();
     ServiceConfigVersionResponse serviceConfigVersionResponse = null;
 
     // set or create configuration mapping (and optionally create the map of properties)
     if (null != request.getDesiredConfig()) {
-      ConfigurationRequest cr = request.getDesiredConfig();
-
-      Config oldConfig = cluster.getDesiredConfigByType(cr.getType());
+      Set<Config> configs = new HashSet<Config>();
+      String note = null;
+      for (ConfigurationRequest cr: request.getDesiredConfig()) {
 
       if (null != cr.getProperties()) {
         // !!! empty property sets are supported, and need to be able to use
@@ -1173,21 +1175,23 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
               request.getClusterName()));
 
           cr.setClusterName(cluster.getClusterName());
-          configurationResponse = createConfiguration(cr);
+          configurationResponses.add(createConfiguration(cr));
         }
       }
-
-      Config baseConfig = cluster.getConfig(cr.getType(), cr.getVersionTag());
-      if (null != baseConfig) {
+        note = cr.getServiceConfigVersionNote();
+        configs.add(cluster.getConfig(cr.getType(), cr.getVersionTag()));
+      }
+      if (!configs.isEmpty()) {
         String authName = getAuthName();
-        serviceConfigVersionResponse = cluster.addDesiredConfig(authName, baseConfig, cr.getServiceConfigVersionNote());
+        serviceConfigVersionResponse = cluster.addDesiredConfig(authName, configs, note);
         if (serviceConfigVersionResponse != null) {
           Logger logger = LoggerFactory.getLogger("configchange");
-          logger.info("cluster '" + request.getClusterName() + "' "
-              + "changed by: '" + authName + "'; "
-              + "type='" + baseConfig.getType() + "' "
-              + "tag='" + baseConfig.getTag() + "'"
-              + (null == oldConfig ? "" : " from='"+ oldConfig.getTag() + "'"));
+          for (Config config: configs) {
+            logger.info("cluster '" + request.getClusterName() + "' "
+                + "changed by: '" + authName + "'; "
+                + "type='" + config.getType() + "' "
+                + "tag='" + config.getTag() + "'");
+          }
         }
       }
     }
@@ -1261,8 +1265,8 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     }
 
     if (serviceConfigVersionResponse != null) {
-      if (configurationResponse != null) {
-        serviceConfigVersionResponse.setConfigurations(Collections.singletonList(configurationResponse));
+      if (!configurationResponses.isEmpty()) {
+        serviceConfigVersionResponse.setConfigurations(configurationResponses);
       }
 
       ClusterResponse clusterResponse =

http://git-wip-us.apache.org/repos/asf/ambari/blob/2cceee2e/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterRequest.java
index 14cc6be..caafb25 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterRequest.java
@@ -18,6 +18,7 @@
 
 package org.apache.ambari.server.controller;
 
+import java.util.List;
 import java.util.Set;
 
 /**
@@ -35,7 +36,7 @@ public class ClusterRequest {
   
   Set<String> hostNames; // CREATE/UPDATE
   
-  private ConfigurationRequest config = null;
+  private List<ConfigurationRequest> configs = null;
 
   private ServiceConfigVersionRequest serviceConfigVersionRequest = null;
 
@@ -128,19 +129,20 @@ public class ClusterRequest {
   }
   
   /**
-   * Sets the config request (if any)
+   * Sets the configs requests (if any)
    * @param configRequest
    */
-  public void setDesiredConfig(ConfigurationRequest configRequest) {
-    config = configRequest;
+  public void setDesiredConfig(List<ConfigurationRequest> configRequest) {
+    configs = configRequest;
   }
   
   /**
    * Gets any configuration-based request (if any).
-   * @return the configuration request, or <code>null</code> if none is set.
+   * @return the list of configuration requests,
+   * or <code>null</code> if none is set.
    */
-  public ConfigurationRequest getDesiredConfig() {
-    return config;
+  public List<ConfigurationRequest> getDesiredConfig() {
+    return configs;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/ambari/blob/2cceee2e/ambari-server/src/main/java/org/apache/ambari/server/controller/HostRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/HostRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/HostRequest.java
index f3668c1..b577bb0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/HostRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/HostRequest.java
@@ -18,6 +18,7 @@
 
 package org.apache.ambari.server.controller;
 
+import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
@@ -28,7 +29,7 @@ public class HostRequest {
   private String clusterName; // CREATE/UPDATE
   private Map<String, String> hostAttributes; // CREATE/UPDATE
   private String rackInfo;
-  private ConfigurationRequest desiredConfig; // UPDATE
+  private List<ConfigurationRequest> desiredConfigs; // UPDATE
   private String maintenanceState; // UPDATE
 
   public HostRequest(String hostname, String clusterName, Map<String, String> hostAttributes) {
@@ -77,12 +78,12 @@ public class HostRequest {
     publicHostname = name;
   }
   
-  public void setDesiredConfig(ConfigurationRequest request) {
-    desiredConfig = request;
+  public void setDesiredConfigs(List<ConfigurationRequest> request) {
+    desiredConfigs = request;
   }
   
-  public ConfigurationRequest getDesiredConfig() {
-    return desiredConfig;
+  public List<ConfigurationRequest> getDesiredConfigs() {
+    return desiredConfigs;
   }
   
   public void setMaintenanceState(String state) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/2cceee2e/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractResourceProvider.java
index a881730..d14cdf3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractResourceProvider.java
@@ -20,6 +20,7 @@ package org.apache.ambari.server.controller.internal;
 
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -343,50 +344,79 @@ public abstract class AbstractResourceProvider extends BaseProvider implements R
    *    whose category is the parent and marked as a desired config.
    * @param properties  the properties on the request.
    */
-  protected ConfigurationRequest getConfigurationRequest(String parentCategory, Map<String, Object> properties) {
+  protected List<ConfigurationRequest> getConfigurationRequests(String parentCategory, Map<String, Object> properties) {
+
+    List<ConfigurationRequest> configs = new LinkedList<ConfigurationRequest>();
+
+    String desiredConfigKey = parentCategory + "/desired_config";
+    // Multiple configs to be updated
+    if (properties.containsKey(desiredConfigKey)
+      && properties.get(desiredConfigKey) instanceof Set) {
+
+      Set<Map<String, Object>> configProperties =
+        (Set<Map<String, Object>>) properties.get(desiredConfigKey);
+      for (Map<String, Object> value: configProperties) {
+        ConfigurationRequest newConfig = new ConfigurationRequest();
+
+        for (Entry<String, Object> e : value.entrySet()) {
+          String propName =
+            PropertyHelper.getPropertyName(desiredConfigKey + '/' + e.getKey());
+          String absCatategory =
+            PropertyHelper.getPropertyCategory(desiredConfigKey + '/' + e.getKey());
+          parseProperties(newConfig, absCatategory, propName, e.getValue().toString());
+        }
+        configs.add(newConfig);
+      }
+      return configs;
+    }
 
     ConfigurationRequest config = null;
-
     // as a convenience, allow consumers to specify name/value overrides in this
     // call instead of forcing a cluster call to do that work
     for (Entry<String, Object> entry : properties.entrySet()) {
       String absCategory = PropertyHelper.getPropertyCategory(entry.getKey());
       String propName = PropertyHelper.getPropertyName(entry.getKey());
 
-      if (absCategory.startsWith(parentCategory + "/desired_config")) {
+      if (absCategory.startsWith(desiredConfigKey)) {
         config = (null == config) ? new ConfigurationRequest() : config;
 
-        if (propName.equals("type"))
-          config.setType(entry.getValue().toString());
-        else if (propName.equals("tag"))
-          config.setVersionTag(entry.getValue().toString());
-        else if (propName.equals("selected")) {
-          config.setSelected(Boolean.parseBoolean(entry.getValue().toString()));
-        }
-        else if (propName.equals("service_config_version_note")) {
-          config.setServiceConfigVersionNote(entry.getValue().toString());
-        }
-        else if (absCategory.endsWith("/properties")) {
-          config.getProperties().put(propName, entry.getValue().toString());
-        }
-        else if (propertiesAttributesPattern.matcher(absCategory).matches()) {
-          String attributeName = absCategory.substring(absCategory.lastIndexOf('/') + 1);
-          Map<String, Map<String, String>> configAttributesMap = config.getPropertiesAttributes();
-          if (null == configAttributesMap) {
-            configAttributesMap = new HashMap<String, Map<String,String>>();
-            config.setPropertiesAttributes(configAttributesMap);
-          }
-          Map<String, String> attributesMap = configAttributesMap.get(attributeName);
-          if (null == attributesMap) {
-            attributesMap = new HashMap<String, String>();
-            configAttributesMap.put(attributeName, attributesMap);
-          }
-          attributesMap.put(PropertyHelper.getPropertyName(entry.getKey()), entry.getValue().toString());
-        }
+        parseProperties(config, absCategory, propName, entry.getValue().toString());
       }
     }
+    if (config != null) {
+      configs.add(config);
+    }
+    return configs;
+  }
 
-    return config;
+  private void parseProperties(ConfigurationRequest config, String absCategory, String propName, String propValue) {
+    if (propName.equals("type"))
+      config.setType(propValue);
+    else if (propName.equals("tag"))
+      config.setVersionTag(propValue);
+    else if (propName.equals("selected")) {
+      config.setSelected(Boolean.parseBoolean(propValue));
+    }
+    else if (propName.equals("service_config_version_note")) {
+      config.setServiceConfigVersionNote(propValue);
+    }
+    else if (absCategory.endsWith("/properties")) {
+      config.getProperties().put(propName, propValue);
+    }
+    else if (propertiesAttributesPattern.matcher(absCategory).matches()) {
+      String attributeName = absCategory.substring(absCategory.lastIndexOf('/') + 1);
+      Map<String, Map<String, String>> configAttributesMap = config.getPropertiesAttributes();
+      if (null == configAttributesMap) {
+        configAttributesMap = new HashMap<String, Map<String,String>>();
+        config.setPropertiesAttributes(configAttributesMap);
+      }
+      Map<String, String> attributesMap = configAttributesMap.get(attributeName);
+      if (null == attributesMap) {
+        attributesMap = new HashMap<String, String>();
+        configAttributesMap.put(attributeName, attributesMap);
+      }
+      attributesMap.put(propName, propValue);
+    }
   }
 
   // get the resources (id fields only) for the given predicate.

http://git-wip-us.apache.org/repos/asf/ambari/blob/2cceee2e/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
index b0e2dd4..3fcfcd4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
@@ -23,6 +23,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
@@ -319,12 +320,12 @@ public class ClusterResourceProvider extends BaseBlueprintProcessor {
         (String) properties.get(CLUSTER_VERSION_PROPERTY_ID),
         null);
 
-    ConfigurationRequest configRequest = getConfigurationRequest("Clusters", properties);
+    List<ConfigurationRequest> configRequests = getConfigurationRequests("Clusters", properties);
 
     ServiceConfigVersionRequest serviceConfigVersionRequest = getServiceConfigVersionRequest("Clusters", properties);
 
-    if (null != configRequest)
-      cr.setDesiredConfig(configRequest);
+    if (!configRequests.isEmpty())
+      cr.setDesiredConfig(configRequests);
 
     if (serviceConfigVersionRequest != null) {
       cr.setServiceConfigVersionRequest(serviceConfigVersionRequest);

http://git-wip-us.apache.org/repos/asf/ambari/blob/2cceee2e/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostResourceProvider.java
index 559f64d..03f7233 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostResourceProvider.java
@@ -330,9 +330,9 @@ public class HostResourceProvider extends AbstractControllerResourceProvider {
     if (null != o)
       hostRequest.setMaintenanceState(o.toString());
     
-    ConfigurationRequest cr = getConfigurationRequest("Hosts", properties);
+    List<ConfigurationRequest> cr = getConfigurationRequests("Hosts", properties);
     
-    hostRequest.setDesiredConfig(cr);
+    hostRequest.setDesiredConfigs(cr);
 
     return hostRequest;
   }
@@ -588,40 +588,40 @@ public class HostResourceProvider extends AbstractControllerResourceProvider {
         }
       }
 
-      if (null != request.getClusterName() && null != request.getDesiredConfig()) {
+      if (null != request.getClusterName() && null != request.getDesiredConfigs()) {
         Cluster c = clusters.getCluster(request.getClusterName());
 
         if (clusters.getHostsForCluster(request.getClusterName()).containsKey(h.getHostName())) {
 
-          ConfigurationRequest cr = request.getDesiredConfig();
+          for (ConfigurationRequest cr : request.getDesiredConfigs()) {
 
-          if (null != cr.getProperties() && cr.getProperties().size() > 0) {
-            LOG.info(MessageFormat.format("Applying configuration with tag ''{0}'' to host ''{1}'' in cluster ''{2}''",
-                cr.getVersionTag(),
-                request.getHostname(),
-                request.getClusterName()));
+            if (null != cr.getProperties() && cr.getProperties().size() > 0) {
+              LOG.info(MessageFormat.format("Applying configuration with tag ''{0}'' to host ''{1}'' in cluster ''{2}''",
+                  cr.getVersionTag(),
+                  request.getHostname(),
+                  request.getClusterName()));
 
-            cr.setClusterName(c.getClusterName());
-            controller.createConfiguration(cr);
-          }
+              cr.setClusterName(c.getClusterName());
+              controller.createConfiguration(cr);
+            }
 
-          Config baseConfig = c.getConfig(cr.getType(), cr.getVersionTag());
-          if (null != baseConfig) {
-            String authName = controller.getAuthName();
-            DesiredConfig oldConfig = h.getDesiredConfigs(c.getClusterId()).get(cr.getType());
-
-            if (h.addDesiredConfig(c.getClusterId(), cr.isSelected(), authName,  baseConfig)) {
-              Logger logger = LoggerFactory.getLogger("configchange");
-              logger.info("cluster '" + c.getClusterName() + "', "
-                  + "host '" + h.getHostName() + "' "
-                  + "changed by: '" + authName + "'; "
-                  + "type='" + baseConfig.getType() + "' "
-                  + "version='" + baseConfig.getVersion() + "'"
-                  + "tag='" + baseConfig.getTag() + "'"
-                  + (null == oldConfig ? "" : ", from='" + oldConfig.getTag() + "'"));
+            Config baseConfig = c.getConfig(cr.getType(), cr.getVersionTag());
+            if (null != baseConfig) {
+              String authName = controller.getAuthName();
+              DesiredConfig oldConfig = h.getDesiredConfigs(c.getClusterId()).get(cr.getType());
+
+              if (h.addDesiredConfig(c.getClusterId(), cr.isSelected(), authName,  baseConfig)) {
+                Logger logger = LoggerFactory.getLogger("configchange");
+                logger.info("cluster '" + c.getClusterName() + "', "
+                    + "host '" + h.getHostName() + "' "
+                    + "changed by: '" + authName + "'; "
+                    + "type='" + baseConfig.getType() + "' "
+                    + "version='" + baseConfig.getVersion() + "'"
+                    + "tag='" + baseConfig.getTag() + "'"
+                    + (null == oldConfig ? "" : ", from='" + oldConfig.getTag() + "'"));
+              }
             }
           }
-
         }
       }
       //todo: if attempt was made to update a property other than those

http://git-wip-us.apache.org/repos/asf/ambari/blob/2cceee2e/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
index 8970961..2c83f1c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
@@ -21,6 +21,7 @@ package org.apache.ambari.server.state;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.locks.ReadWriteLock;
 
 import com.google.common.collect.ListMultimap;
@@ -157,22 +158,22 @@ public interface Cluster {
    * Adds and sets a DESIRED configuration to be applied to a cluster.  There
    * can be only one selected config per type.
    * @param user the user making the change for audit purposes
-   * @param config  the {@link org.apache.ambari.server.state.Config} object to set as desired
+   * @param configs  the set of {@link org.apache.ambari.server.state.Config} objects to set as desired
    * @return <code>true</code> if the config was added, or <code>false</code>
    * if the config is already set as the current
    */
-  public ServiceConfigVersionResponse addDesiredConfig(String user, Config config);
+  public ServiceConfigVersionResponse addDesiredConfig(String user, Set<Config> configs);
 
   /**
    * Adds and sets a DESIRED configuration to be applied to a cluster.  There
    * can be only one selected config per type.
    * @param user the user making the change for audit purposes
-   * @param config  the {@link org.apache.ambari.server.state.Config} object to set as desired
+   * @param configs  the set of {@link org.apache.ambari.server.state.Config} objects to set as desired
    * @param serviceConfigVersionNote note to attach to service config version if created
    * @return <code>true</code> if the config was added, or <code>false</code>
    * if the config is already set as the current
    */
-  ServiceConfigVersionResponse addDesiredConfig(String user, Config config, String serviceConfigVersionNote);
+  ServiceConfigVersionResponse addDesiredConfig(String user, Set<Config> configs, String serviceConfigVersionNote);
 
   ServiceConfigVersionResponse createServiceConfigVersion(String serviceName, String user, String note,
                                                           ConfigGroup configGroup);
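
For orientation only (not code from this commit): a minimal sketch of the revised Cluster contract, assuming two already-created Config objects that both belong to the HDFS service, so the batch yields exactly one new service config version.

  import java.util.Arrays;
  import java.util.HashSet;
  import java.util.Set;

  import org.apache.ambari.server.state.Cluster;
  import org.apache.ambari.server.state.Config;

  // Hypothetical caller: wraps two HDFS configs in a Set and applies them as one batch.
  final class DesiredConfigBatchSketch {
    static void applyHdfsBatch(Cluster cluster, Config hdfsSite, Config hadoopEnv) {
      Set<Config> batch = new HashSet<Config>(Arrays.asList(hdfsSite, hadoopEnv));
      // One service config version is created for the whole batch.
      cluster.addDesiredConfig("admin", batch, "tune HDFS replication and env settings");
    }
  }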

http://git-wip-us.apache.org/repos/asf/ambari/blob/2cceee2e/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
index 1161cc6..a0d9e6e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
@@ -19,6 +19,7 @@ package org.apache.ambari.server.state;
 
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -477,7 +478,7 @@ public class ConfigHelper {
     Config baseConfig = cluster.getConfig(cr.getType(), cr.getVersionTag());
     
     if (baseConfig != null) {
-      cluster.addDesiredConfig(authName, baseConfig);
+      cluster.addDesiredConfig(authName, Collections.singleton(baseConfig));
     }
   }
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/2cceee2e/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index ee6952a..8e073f1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -1333,12 +1333,12 @@ public class ClusterImpl implements Cluster {
   }
 
   @Override
-  public ServiceConfigVersionResponse addDesiredConfig(String user, Config config) {
-    return addDesiredConfig(user, config, null);
+  public ServiceConfigVersionResponse addDesiredConfig(String user, Set<Config> configs) {
+    return addDesiredConfig(user, configs, null);
   }
 
   @Override
-  public ServiceConfigVersionResponse addDesiredConfig(String user, Config config, String serviceConfigVersionNote) {
+  public ServiceConfigVersionResponse addDesiredConfig(String user, Set<Config> configs, String serviceConfigVersionNote) {
     if (null == user)
       throw new NullPointerException("User must be specified.");
 
@@ -1346,15 +1346,28 @@ public class ClusterImpl implements Cluster {
     try {
       readWriteLock.writeLock().lock();
       try {
-        Config currentDesired = getDesiredConfigByType(config.getType());
-
-        // do not set if it is already the current
-        if (null != currentDesired && currentDesired.getTag().equals(config.getTag())) {
+        if (configs == null) {
           return null;
         }
 
+        Iterator<Config> configIterator = configs.iterator();
+
+        while (configIterator.hasNext()) {
+          Config config = configIterator.next();
+          if (config == null) {
+            configIterator.remove();
+            continue;
+          }
+          Config currentDesired = getDesiredConfigByType(config.getType());
+
+          // do not set if it is already the current
+          if (null != currentDesired && currentDesired.getTag().equals(config.getTag())) {
+            configIterator.remove();
+          }
+        }
+
         ServiceConfigVersionResponse serviceConfigVersionResponse =
-            applyConfig(config.getType(), config.getTag(), user, serviceConfigVersionNote);
+            applyConfigs(configs, user, serviceConfigVersionNote);
 
         configHelper.invalidateStaleConfigsCache();
         return serviceConfigVersionResponse;
@@ -1705,16 +1718,23 @@ public class ClusterImpl implements Cluster {
   }
 
   @Transactional
-  ServiceConfigVersionResponse applyConfig(String type, String tag, String user, String serviceConfigVersionNote) {
-
-    selectConfig(type, tag, user);
+  ServiceConfigVersionResponse applyConfigs(Set<Config> configs, String user, String serviceConfigVersionNote) {
 
     String serviceName = null;
-    //find service name for config type
-    for (Entry<String, String> entry : serviceConfigTypes.entries()) {
-      if (StringUtils.equals(entry.getValue(), type)) {
-        serviceName = entry.getKey();
-        break;
+    for (Config config: configs) {
+
+      selectConfig(config.getType(), config.getTag(), user);
+      //find service name for config type
+      for (Entry<String, String> entry : serviceConfigTypes.entries()) {
+        if (StringUtils.equals(entry.getValue(), config.getType())) {
+          if (serviceName != null && !serviceName.equals(entry.getKey())) {
+            LOG.error("Updating configs for multiple services by a " +
+              "single API request isn't supported, config version not created");
+            return null;
+          }
+          serviceName = entry.getKey();
+          break;
+        }
       }
     }
 
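One behavior worth noting in applyConfigs above: if the supplied configs map to more than one service, the method logs an error and returns null, so no service config version is created. A hedged caller-side sketch, with hypothetical Config objects, keeping one batch per service:

  import java.util.Collections;

  import org.apache.ambari.server.state.Cluster;
  import org.apache.ambari.server.state.Config;

  // Hypothetical split, not from the commit: configs belonging to different services are
  // sent as separate batches, since a mixed batch creates no service config version at all.
  final class PerServiceBatchSketch {
    static void applySplit(Cluster cluster, Config hdfsSiteConfig, Config yarnSiteConfig) {
      cluster.addDesiredConfig("admin", Collections.singleton(hdfsSiteConfig), "HDFS tuning");
      cluster.addDesiredConfig("admin", Collections.singleton(yarnSiteConfig), "YARN tuning");
    }
  }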

http://git-wip-us.apache.org/repos/asf/ambari/blob/2cceee2e/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
index 4d68402..a53159c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
@@ -38,6 +38,7 @@ import org.slf4j.LoggerFactory;
 
 import javax.persistence.EntityManager;
 import java.sql.SQLException;
+import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.Map;
@@ -227,7 +228,7 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
               if (baseConfig != null) {
                 String authName = "ambari-upgrade";
 
-                if (cluster.addDesiredConfig(authName, baseConfig) != null) {
+                if (cluster.addDesiredConfig(authName, Collections.singleton(baseConfig)) != null) {
                   String oldConfigString = (oldConfig != null) ? " from='" + oldConfig.getTag() + "'" : "";
                   LOG.info("cluster '" + cluster.getClusterName() + "' "
                     + "changed by: '" + authName + "'; "

http://git-wip-us.apache.org/repos/asf/ambari/blob/2cceee2e/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
index 847a34d..e80e3e5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
@@ -127,7 +127,7 @@ public class TestHeartbeatMonitor {
         new HashMap<String,String>() {{ put("a", "b"); }}, new HashMap<String, Map<String,String>>());
     config.setTag("version1");
     cluster.addConfig(config);
-    cluster.addDesiredConfig("_test", config);
+    cluster.addDesiredConfig("_test", Collections.singleton(config));
     
     
     clusters.mapHostsToCluster(hostNames, clusterName);
@@ -218,7 +218,7 @@ public class TestHeartbeatMonitor {
       }}, new HashMap<String, Map<String,String>>());
     config.setTag("version1");
     cluster.addConfig(config);
-    cluster.addDesiredConfig("_test", config);
+    cluster.addDesiredConfig("_test", Collections.singleton(config));
 
 
     clusters.mapHostsToCluster(hostNames, clusterName);

http://git-wip-us.apache.org/repos/asf/ambari/blob/2cceee2e/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index ca76dc5..cf2bcbb 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -2274,7 +2274,7 @@ public class AmbariManagementControllerTest {
     cr1 = new ConfigurationRequest(clusterName, "hdfs-site", "version1",
         configs, null);
     ClusterRequest crReq = new ClusterRequest(null, clusterName, null, null);
-    crReq.setDesiredConfig(cr1);
+    crReq.setDesiredConfig(Collections.singletonList(cr1));
     controller.updateClusters(Collections.singleton(crReq), null);
 
     // Start
@@ -4220,8 +4220,8 @@ public class AmbariManagementControllerTest {
 
     cluster.addConfig(config1);
     cluster.addConfig(config2);
-    cluster.addDesiredConfig("_test", config1);
-    cluster.addDesiredConfig("_test", config2);
+    cluster.addDesiredConfig("_test", Collections.singleton(config1));
+    cluster.addDesiredConfig("_test", Collections.singleton(config2));
 
     Service hdfs = cluster.addService("HDFS");
     hdfs.persist();
@@ -4482,8 +4482,8 @@ public class AmbariManagementControllerTest {
 
     cluster.addConfig(config1);
     cluster.addConfig(config2);
-    cluster.addDesiredConfig("_test", config1);
-    cluster.addDesiredConfig("_test", config2);
+    cluster.addDesiredConfig("_test", Collections.singleton(config1));
+    cluster.addDesiredConfig("_test", Collections.singleton(config2));
 
     Service hdfs = cluster.addService("HDFS");
     Service mapReduce = cluster.addService("MAPREDUCE");
@@ -4666,8 +4666,8 @@ public class AmbariManagementControllerTest {
 
 
     ClusterRequest cr = new ClusterRequest(null, clusterName, null, null);
-    cr.setDesiredConfig(new ConfigurationRequest(clusterName, "global",
-        "v1", configs, null));
+    cr.setDesiredConfig(Collections.singletonList(new ConfigurationRequest(clusterName, "global",
+        "v1", configs, null)));
     controller.updateClusters(Collections.singleton(cr), Collections.<String, String>emptyMap());
 
     Set<ServiceRequest> sReqs = new HashSet<ServiceRequest>();
@@ -5268,10 +5268,10 @@ public class AmbariManagementControllerTest {
       configs, null);
 
     ClusterRequest crReq = new ClusterRequest(null, clusterName, null, null);
-    crReq.setDesiredConfig(cr1);
+    crReq.setDesiredConfig(Collections.singletonList(cr1));
     controller.updateClusters(Collections.singleton(crReq), null);
     crReq = new ClusterRequest(null, clusterName, null, null);
-    crReq.setDesiredConfig(cr2);
+    crReq.setDesiredConfig(Collections.singletonList(cr2));
     controller.updateClusters(Collections.singleton(crReq), null);
 
     // Install
@@ -5294,7 +5294,7 @@ public class AmbariManagementControllerTest {
     cr3 = new ConfigurationRequest(clusterName, "core-site","version122",
       configs, null);
     crReq = new ClusterRequest(null, clusterName, null, null);
-    crReq.setDesiredConfig(cr3);
+    crReq.setDesiredConfig(Collections.singletonList(cr3));
     controller.updateClusters(Collections.singleton(crReq), null);
 
     // Stop HDFS & MAPREDUCE
@@ -5425,10 +5425,10 @@ public class AmbariManagementControllerTest {
       configs, null);
 
     ClusterRequest crReq = new ClusterRequest(null, clusterName, null, null);
-    crReq.setDesiredConfig(cr1);
+    crReq.setDesiredConfig(Collections.singletonList(cr1));
     controller.updateClusters(Collections.singleton(crReq), null);
     crReq = new ClusterRequest(null, clusterName, null, null);
-    crReq.setDesiredConfig(cr2);
+    crReq.setDesiredConfig(Collections.singletonList(cr2));
     controller.updateClusters(Collections.singleton(crReq), null);
 
     installService(clusterName, serviceName, false, false);
@@ -5451,7 +5451,7 @@ public class AmbariManagementControllerTest {
     cr3 = new ConfigurationRequest(clusterName, "core-site","version122",
       configs, null);
     crReq = new ClusterRequest(null, clusterName, null, null);
-    crReq.setDesiredConfig(cr3);
+    crReq.setDesiredConfig(Collections.singletonList(cr3));
     controller.updateClusters(Collections.singleton(crReq), null);
 
     long id = startService(clusterName, serviceName, false, true);
@@ -6027,7 +6027,7 @@ public class AmbariManagementControllerTest {
     cr1 = new ConfigurationRequest(clusterName, "hive-site","version1",
       configs, null);
     ClusterRequest crReq = new ClusterRequest(null, clusterName, null, null);
-    crReq.setDesiredConfig(cr1);
+    crReq.setDesiredConfig(Collections.singletonList(cr1));
     controller.updateClusters(Collections.singleton(crReq), null);
 
     // Install
@@ -6105,7 +6105,7 @@ public class AmbariManagementControllerTest {
     cr1 = new ConfigurationRequest(clusterName, "hdfs-site","version1",
       configs, null);
     ClusterRequest crReq = new ClusterRequest(null, clusterName, null, null);
-    crReq.setDesiredConfig(cr1);
+    crReq.setDesiredConfig(Collections.singletonList(cr1));
     controller.updateClusters(Collections.singleton(crReq), null);
 
     // Start
@@ -6491,10 +6491,10 @@ public class AmbariManagementControllerTest {
       configs, null);
 
     ClusterRequest crReq = new ClusterRequest(null, clusterName, null, null);
-    crReq.setDesiredConfig(cr1);
+    crReq.setDesiredConfig(Collections.singletonList(cr1));
     controller.updateClusters(Collections.singleton(crReq), null);
     crReq = new ClusterRequest(null, clusterName, null, null);
-    crReq.setDesiredConfig(cr2);
+    crReq.setDesiredConfig(Collections.singletonList(cr2));
     controller.updateClusters(Collections.singleton(crReq), null);
 
     // Install
@@ -6716,13 +6716,13 @@ public class AmbariManagementControllerTest {
       configs, null);
 
     ClusterRequest crReq = new ClusterRequest(null, clusterName, null, null);
-    crReq.setDesiredConfig(cr1);
+    crReq.setDesiredConfig(Collections.singletonList(cr1));
     controller.updateClusters(Collections.singleton(crReq), null);
     crReq = new ClusterRequest(null, clusterName, null, null);
-    crReq.setDesiredConfig(cr2);
+    crReq.setDesiredConfig(Collections.singletonList(cr2));
     controller.updateClusters(Collections.singleton(crReq), null);
     crReq = new ClusterRequest(null, clusterName, null, null);
-    crReq.setDesiredConfig(cr3);
+    crReq.setDesiredConfig(Collections.singletonList(cr3));
     controller.updateClusters(Collections.singleton(crReq), null);
 
     // Create Config group for core-site
@@ -6874,7 +6874,7 @@ public class AmbariManagementControllerTest {
     cr1 = new ConfigurationRequest(clusterName, "hdfs-site", "version1",
         configs, null);
     ClusterRequest crReq = new ClusterRequest(null, clusterName, null, null);
-    crReq.setDesiredConfig(cr1);
+    crReq.setDesiredConfig(Collections.singletonList(cr1));
     controller.updateClusters(Collections.singleton(crReq), null);
 
     // Start
@@ -6977,10 +6977,10 @@ public class AmbariManagementControllerTest {
       configs, null);
 
     ClusterRequest crReq = new ClusterRequest(null, clusterName, null, null);
-    crReq.setDesiredConfig(cr1);
+    crReq.setDesiredConfig(Collections.singletonList(cr1));
     controller.updateClusters(Collections.singleton(crReq), null);
     crReq = new ClusterRequest(null, clusterName, null, null);
-    crReq.setDesiredConfig(cr2);
+    crReq.setDesiredConfig(Collections.singletonList(cr2));
     controller.updateClusters(Collections.singleton(crReq), null);
 
     // Install
@@ -8831,7 +8831,7 @@ public class AmbariManagementControllerTest {
 
       ConfigurationRequest configRequest = new ConfigurationRequest(CLUSTER_NAME, "global", "version1",
           new HashMap<String, String>() {{ put("a", "b"); }}, null);
-      cr.setDesiredConfig(configRequest);
+      cr.setDesiredConfig(Collections.singletonList(configRequest));
       amc.updateClusters(Collections.singleton(cr), new HashMap<String, String>());
 
       // add some hosts
@@ -10067,22 +10067,22 @@ public class AmbariManagementControllerTest {
     ClusterRequest cr = new ClusterRequest(null, cluster.getClusterName(), null, null);
 
     // test null map with no prior
-    cr.setDesiredConfig(
-        new ConfigurationRequest(clusterName, "typeA", "v1", null, null));
+    cr.setDesiredConfig(Collections.singletonList(
+        new ConfigurationRequest(clusterName, "typeA", "v1", null, null)));
     controller.updateClusters(Collections.singleton(cr), new HashMap<String, String>());
     Config config = cluster.getDesiredConfigByType("typeA");
     Assert.assertNull(config);
 
     // test empty map with no prior
-    cr.setDesiredConfig(
-        new ConfigurationRequest(clusterName, "typeA", "v1", new HashMap<String, String>(), new HashMap<String, Map<String,String>>()));
+    cr.setDesiredConfig(Collections.singletonList(
+        new ConfigurationRequest(clusterName, "typeA", "v1", new HashMap<String, String>(), new HashMap<String, Map<String,String>>())));
     controller.updateClusters(Collections.singleton(cr), new HashMap<String, String>());
     config = cluster.getDesiredConfigByType("typeA");
     Assert.assertNotNull(config);
 
     // test empty properties on a new version
-    cr.setDesiredConfig(
-        new ConfigurationRequest(clusterName, "typeA", "v2", new HashMap<String, String>(), new HashMap<String, Map<String,String>>()));
+    cr.setDesiredConfig(Collections.singletonList(
+        new ConfigurationRequest(clusterName, "typeA", "v2", new HashMap<String, String>(), new HashMap<String, Map<String,String>>())));
     controller.updateClusters(Collections.singleton(cr), new HashMap<String, String>());
     config = cluster.getDesiredConfigByType("typeA");
     Assert.assertNotNull(config);
@@ -10095,16 +10095,16 @@ public class AmbariManagementControllerTest {
     Map<String, Map<String, String>> attributesMap = new HashMap<String, Map<String,String>>();
     attributesMap.put("final", new HashMap<String, String>());
     attributesMap.get("final").put("c", "true");
-    cr.setDesiredConfig(
-        new ConfigurationRequest(clusterName, "typeA", "v3", map, attributesMap));
+    cr.setDesiredConfig(Collections.singletonList(
+        new ConfigurationRequest(clusterName, "typeA", "v3", map, attributesMap)));
     controller.updateClusters(Collections.singleton(cr), new HashMap<String, String>());
     config = cluster.getDesiredConfigByType("typeA");
     Assert.assertNotNull(config);
     Assert.assertTrue(config.getProperties().containsKey("c"));
 
     // test reset to v2
-    cr.setDesiredConfig(
-        new ConfigurationRequest(clusterName, "typeA", "v2", new HashMap<String, String>(), new HashMap<String, Map<String,String>>()));
+    cr.setDesiredConfig(Collections.singletonList(
+        new ConfigurationRequest(clusterName, "typeA", "v2", new HashMap<String, String>(), new HashMap<String, Map<String,String>>())));
     controller.updateClusters(Collections.singleton(cr), new HashMap<String, String>());
     config = cluster.getDesiredConfigByType("typeA");
     Assert.assertEquals("v2", config.getTag());
@@ -10112,11 +10112,11 @@ public class AmbariManagementControllerTest {
     Assert.assertEquals(Integer.valueOf(0), Integer.valueOf(config.getProperties().size()));
 
     // test v2, but with properties
-    cr.setDesiredConfig(
+    cr.setDesiredConfig(Collections.singletonList(
         new ConfigurationRequest(clusterName, "typeA", "v2", new HashMap<String, String>() {{ put("a", "b"); }},
             new HashMap<String, Map<String,String>>(){{put("final", new HashMap<String, String>(){{put("a", "true");}});
           }
-        }));
+        })));
     try {
       controller.updateClusters(Collections.singleton(cr), new HashMap<String, String>());
       Assert.fail("Expect failure when creating a config that exists");
@@ -10257,7 +10257,7 @@ public class AmbariManagementControllerTest {
 
     ConfigurationRequest cr1 = new ConfigurationRequest(clusterName, "hdfs-site", "version1", hdfsConfigs, hdfsConfigAttributes);
     ClusterRequest crReq1 = new ClusterRequest(null, clusterName, null, null);
-    crReq1.setDesiredConfig(cr1);
+    crReq1.setDesiredConfig(Collections.singletonList(cr1));
 
     controller.updateClusters(Collections.singleton(crReq1), null);
 
@@ -10340,7 +10340,7 @@ public class AmbariManagementControllerTest {
 
     ConfigurationRequest cr1 = new ConfigurationRequest(clusterName, "hdfs-site", "version1", hdfsConfigs, hdfsConfigAttributes);
     ClusterRequest crReq1 = new ClusterRequest(null, clusterName, null, null);
-    crReq1.setDesiredConfig(cr1);
+    crReq1.setDesiredConfig(Collections.singletonList(cr1));
 
     controller.updateClusters(Collections.singleton(crReq1), null);
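
A note on the pattern repeated through these hunks: ClusterRequest.setDesiredConfig(...) now takes a List<ConfigurationRequest> rather than a single request, so existing call sites simply wrap their request in Collections.singletonList(...). A minimal sketch of the migrated call, assuming only the constructor and setter signatures visible in this diff (clusterName and controller are provided by the surrounding test fixture; needs java.util.Collections and java.util.HashMap):

    // Sketch only, not part of the patch.
    ConfigurationRequest cr = new ConfigurationRequest(clusterName, "hdfs-site", "version1",
        new HashMap<String, String>(), null);                    // properties, attributes
    ClusterRequest crReq = new ClusterRequest(null, clusterName, null, null);
    crReq.setDesiredConfig(Collections.singletonList(cr));       // was: crReq.setDesiredConfig(cr)
    controller.updateClusters(Collections.singleton(crReq), null);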
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/2cceee2e/ambari-server/src/test/java/org/apache/ambari/server/controller/RefreshYarnCapacitySchedulerReleaseConfigTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/RefreshYarnCapacitySchedulerReleaseConfigTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/RefreshYarnCapacitySchedulerReleaseConfigTest.java
index e13c25e..6565219 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/RefreshYarnCapacitySchedulerReleaseConfigTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/RefreshYarnCapacitySchedulerReleaseConfigTest.java
@@ -86,7 +86,7 @@ public class RefreshYarnCapacitySchedulerReleaseConfigTest {
     // Start
     ClusterRequest cr = new ClusterRequest(cluster.getClusterId(), "c1", cluster.getDesiredStackVersion().getStackVersion(), null);
 
-    cr.setDesiredConfig(new ConfigurationRequest("c1","capacity-scheduler","version2",new HashMap<String, String>(), null));
+    cr.setDesiredConfig(Collections.singletonList(new ConfigurationRequest("c1","capacity-scheduler","version2",new HashMap<String, String>(), null)));
     
     controller.updateClusters(Collections.singleton(cr) , null);
     
@@ -106,7 +106,7 @@ public class RefreshYarnCapacitySchedulerReleaseConfigTest {
     // Start
     ClusterRequest cr = new ClusterRequest(cluster.getClusterId(), "c1", cluster.getDesiredStackVersion().getStackVersion(), null);
     
-    cr.setDesiredConfig(new ConfigurationRequest("c1","core-site","version2",new HashMap<String, String>(),null));
+    cr.setDesiredConfig(Collections.singletonList(new ConfigurationRequest("c1","core-site","version2",new HashMap<String, String>(),null)));
     
     controller.updateClusters(Collections.singleton(cr) , null);
     

http://git-wip-us.apache.org/repos/asf/ambari/blob/2cceee2e/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
index 7d51184..b5c50ff 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
@@ -517,13 +517,13 @@ public class ClusterResourceProviderTest {
     assertEquals(clusterName, ucr5.getClusterName());
     assertEquals(clusterName, ucr6.getClusterName());
     assertEquals(clusterName, ucr7.getClusterName());
-    ConfigurationRequest cr1 = ucr1.getDesiredConfig();
-    ConfigurationRequest cr2 = ucr2.getDesiredConfig();
-    ConfigurationRequest cr3 = ucr3.getDesiredConfig();
-    ConfigurationRequest cr4 = ucr4.getDesiredConfig();
-    ConfigurationRequest cr5 = ucr5.getDesiredConfig();
-    ConfigurationRequest cr6 = ucr6.getDesiredConfig();
-    ConfigurationRequest cr7 = ucr7.getDesiredConfig();
+    ConfigurationRequest cr1 = ucr1.getDesiredConfig().get(0);
+    ConfigurationRequest cr2 = ucr2.getDesiredConfig().get(0);
+    ConfigurationRequest cr3 = ucr3.getDesiredConfig().get(0);
+    ConfigurationRequest cr4 = ucr4.getDesiredConfig().get(0);
+    ConfigurationRequest cr5 = ucr5.getDesiredConfig().get(0);
+    ConfigurationRequest cr6 = ucr6.getDesiredConfig().get(0);
+    ConfigurationRequest cr7 = ucr7.getDesiredConfig().get(0);
     assertEquals("1", cr1.getVersionTag());
     assertEquals("1", cr2.getVersionTag());
     assertEquals("1", cr3.getVersionTag());
@@ -1718,12 +1718,12 @@ public class ClusterResourceProviderTest {
     assertEquals(clusterName, ucr4.getClusterName());
     assertEquals(clusterName, ucr5.getClusterName());
     assertEquals(clusterName, ucr6.getClusterName());
-    ConfigurationRequest cr1 = ucr1.getDesiredConfig();
-    ConfigurationRequest cr2 = ucr2.getDesiredConfig();
-    ConfigurationRequest cr3 = ucr3.getDesiredConfig();
-    ConfigurationRequest cr4 = ucr4.getDesiredConfig();
-    ConfigurationRequest cr5 = ucr5.getDesiredConfig();
-    ConfigurationRequest cr6 = ucr6.getDesiredConfig();
+    ConfigurationRequest cr1 = ucr1.getDesiredConfig().get(0);
+    ConfigurationRequest cr2 = ucr2.getDesiredConfig().get(0);
+    ConfigurationRequest cr3 = ucr3.getDesiredConfig().get(0);
+    ConfigurationRequest cr4 = ucr4.getDesiredConfig().get(0);
+    ConfigurationRequest cr5 = ucr5.getDesiredConfig().get(0);
+    ConfigurationRequest cr6 = ucr6.getDesiredConfig().get(0);
 
     assertEquals("1", cr1.getVersionTag());
     assertEquals("1", cr2.getVersionTag());
@@ -2413,13 +2413,13 @@ public class ClusterResourceProviderTest {
     assertEquals(clusterName, ucr5.getClusterName());
     assertEquals(clusterName, ucr6.getClusterName());
     assertEquals(clusterName, ucr7.getClusterName());
-    ConfigurationRequest cr1 = ucr1.getDesiredConfig();
-    ConfigurationRequest cr2 = ucr2.getDesiredConfig();
-    ConfigurationRequest cr3 = ucr3.getDesiredConfig();
-    ConfigurationRequest cr4 = ucr4.getDesiredConfig();
-    ConfigurationRequest cr5 = ucr5.getDesiredConfig();
-    ConfigurationRequest cr6 = ucr6.getDesiredConfig();
-    ConfigurationRequest cr7 = ucr7.getDesiredConfig();
+    ConfigurationRequest cr1 = ucr1.getDesiredConfig().get(0);
+    ConfigurationRequest cr2 = ucr2.getDesiredConfig().get(0);
+    ConfigurationRequest cr3 = ucr3.getDesiredConfig().get(0);
+    ConfigurationRequest cr4 = ucr4.getDesiredConfig().get(0);
+    ConfigurationRequest cr5 = ucr5.getDesiredConfig().get(0);
+    ConfigurationRequest cr6 = ucr6.getDesiredConfig().get(0);
+    ConfigurationRequest cr7 = ucr7.getDesiredConfig().get(0);
     assertEquals("1", cr1.getVersionTag());
     assertEquals("1", cr2.getVersionTag());
     assertEquals("1", cr3.getVersionTag());

http://git-wip-us.apache.org/repos/asf/ambari/blob/2cceee2e/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/JMXHostProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/JMXHostProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/JMXHostProviderTest.java
index 94bd0c2..a11dc43 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/JMXHostProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/JMXHostProviderTest.java
@@ -188,7 +188,7 @@ public class JMXHostProviderTest {
       ConfigurationRequest cr = new ConfigurationRequest(clusterName,
         "hdfs-site", "version1", configs, null);
       ClusterRequest crequest = new ClusterRequest(null, clusterName, null, null);
-      crequest.setDesiredConfig(cr);
+      crequest.setDesiredConfig(Collections.singletonList(cr));
       controller.updateClusters(Collections.singleton(crequest), new HashMap<String,String>());
       
     } else {
@@ -200,7 +200,7 @@ public class JMXHostProviderTest {
         "hdfs-site", "version2", configs, null);
       
       ClusterRequest crequest = new ClusterRequest(null, clusterName, null, null);
-      crequest.setDesiredConfig(cr);
+      crequest.setDesiredConfig(Collections.singletonList(cr));
       controller.updateClusters(Collections.singleton(crequest), new HashMap<String,String>());
     }
   }
@@ -272,7 +272,7 @@ public class JMXHostProviderTest {
       "hdfs-site", "versionN", configs, null);
 
     ClusterRequest crReq = new ClusterRequest(null, clusterName, null, null);
-    crReq.setDesiredConfig(cr1);
+    crReq.setDesiredConfig(Collections.singletonList(cr1));
     controller.updateClusters(Collections.singleton(crReq), null);
     Cluster cluster = clusters.getCluster(clusterName);
     Assert.assertEquals("versionN", cluster.getDesiredConfigByType("hdfs-site")
@@ -280,7 +280,7 @@ public class JMXHostProviderTest {
 
     ConfigurationRequest cr2 = new ConfigurationRequest(clusterName,
       "yarn-site", "versionN", yarnConfigs, null);
-    crReq.setDesiredConfig(cr2);
+    crReq.setDesiredConfig(Collections.singletonList(cr2));
     controller.updateClusters(Collections.singleton(crReq), null);
 
     Assert.assertEquals("versionN", cluster.getDesiredConfigByType("yarn-site")
@@ -400,7 +400,7 @@ public class JMXHostProviderTest {
       "yarn-site", "versionN+1", yarnConfigs, null);
 
     ClusterRequest crReq = new ClusterRequest(null, "c1", null, null);
-    crReq.setDesiredConfig(cr2);
+    crReq.setDesiredConfig(Collections.singletonList(cr2));
     controller.updateClusters(Collections.singleton(crReq), null);
     Assert.assertEquals("50030", providerModule.getPort("c1", "RESOURCEMANAGER"));
     Assert.assertEquals("11111", providerModule.getPort("c1", "NODEMANAGER"));

http://git-wip-us.apache.org/repos/asf/ambari/blob/2cceee2e/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
index ffd4358..73004ba 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
@@ -36,11 +36,11 @@ import org.junit.Before;
 import org.junit.Test;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 
 public class ConfigHelperTest {
   private Clusters clusters;
@@ -97,7 +97,7 @@ public class ConfigHelperTest {
       new ClusterRequest(cluster.getClusterId(), clusterName,
         cluster.getDesiredStackVersion().getStackVersion(), null);
 
-    clusterRequest1.setDesiredConfig(cr);
+    clusterRequest1.setDesiredConfig(Collections.singletonList(cr));
     managementController.updateClusters(new HashSet<ClusterRequest>()
     {{ add(clusterRequest1); }}, null);
 
@@ -119,7 +119,7 @@ public class ConfigHelperTest {
       new ClusterRequest(cluster.getClusterId(), clusterName,
         cluster.getDesiredStackVersion().getStackVersion(), null);
 
-    clusterRequest2.setDesiredConfig(cr);
+    clusterRequest2.setDesiredConfig(Collections.singletonList(cr));
     managementController.updateClusters(new HashSet<ClusterRequest>()
     {{ add(clusterRequest2); }}, null);
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/2cceee2e/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
index 222924b..201702b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
@@ -354,7 +354,7 @@ public class ClusterTest {
     c1.addConfig(config2);
     c1.addConfig(config3);
     
-    c1.addDesiredConfig("_test", config1);
+    c1.addDesiredConfig("_test", Collections.singleton(config1));
     Config res = c1.getDesiredConfigByType("global");
     Assert.assertNotNull("Expected non-null config", res);
     Assert.assertEquals("true", res.getPropertiesAttributes().get("final").get("a"));
@@ -362,7 +362,7 @@ public class ClusterTest {
     res = c1.getDesiredConfigByType("core-site");
     Assert.assertNull("Expected null config", res);
     
-    c1.addDesiredConfig("_test", config2);
+    c1.addDesiredConfig("_test", Collections.singleton(config2));
     res = c1.getDesiredConfigByType("global");
     Assert.assertEquals("Expected version tag to be 'version2'", "version2", res.getTag());
     Assert.assertEquals("true", res.getPropertiesAttributes().get("final").get("x"));
@@ -387,15 +387,15 @@ public class ClusterTest {
     c1.addConfig(config3);
     
     try {
-      c1.addDesiredConfig(null, config1);
+      c1.addDesiredConfig(null, Collections.singleton(config1));
       fail("Cannot set a null user with config");
     }
     catch (Exception e) {
       // test failure
     }
     
-    c1.addDesiredConfig("_test1", config1);
-    c1.addDesiredConfig("_test3", config3);
+    c1.addDesiredConfig("_test1", Collections.singleton(config1));
+    c1.addDesiredConfig("_test3", Collections.singleton(config3));
     
     Map<String, DesiredConfig> desiredConfigs = c1.getDesiredConfigs();
     Assert.assertFalse("Expect desired config not contain 'mapred-site'", desiredConfigs.containsKey("mapred-site"));
@@ -409,10 +409,10 @@ public class ClusterTest {
     Assert.assertTrue("Expect no host-level overrides",
         (null == dc.getHostOverrides() || dc.getHostOverrides().size() == 0));
     
-    c1.addDesiredConfig("_test2", config2);
+    c1.addDesiredConfig("_test2", Collections.singleton(config2));
     Assert.assertEquals("_test2", c1.getDesiredConfigs().get(config2.getType()).getUser());
     
-    c1.addDesiredConfig("_test1", config1);
+    c1.addDesiredConfig("_test1", Collections.singleton(config1));
 
     // setup a host that also has a config override
     Host host = clusters.getHost("h1");
@@ -647,7 +647,7 @@ public class ClusterTest {
     c1.addConfig(config1);
     c1.addConfig(config2);
 
-    c1.addDesiredConfig("admin", config1);
+    c1.addDesiredConfig("admin", Collections.singleton(config1));
     List<ServiceConfigVersionResponse> serviceConfigVersions =
       c1.getServiceConfigVersions();
     Assert.assertNotNull(serviceConfigVersions);
@@ -663,7 +663,7 @@ public class ClusterTest {
     Assert.assertEquals("admin", mapredResponse.getUserName());
     Assert.assertEquals(Long.valueOf(1), mapredResponse.getVersion());
 
-    c1.addDesiredConfig("admin", config2);
+    c1.addDesiredConfig("admin", Collections.singleton(config2));
     serviceConfigVersions = c1.getServiceConfigVersions();
     Assert.assertNotNull(serviceConfigVersions);
     // created new ServiceConfigVersion
@@ -676,7 +676,7 @@ public class ClusterTest {
     Assert.assertEquals("admin", mapredResponse.getUserName());
 
    // Rollback: cloning the version1 config creates a new ServiceConfigVersion
-    c1.addDesiredConfig("admin", config1);
+    c1.addDesiredConfig("admin", Collections.singleton(config1));
     serviceConfigVersions = c1.getServiceConfigVersions();
     Assert.assertNotNull(serviceConfigVersions);
     // created new ServiceConfigVersion
@@ -688,4 +688,39 @@ public class ClusterTest {
     Assert.assertEquals("c1", mapredResponse.getClusterName());
     Assert.assertEquals("admin", mapredResponse.getUserName());
   }
+
+  @Test
+  public void testSingleServiceVersionForMultipleConfigs() {
+    Config config1 = configFactory.createNew(c1, "hdfs-site",
+      new HashMap<String, String>() {{ put("a", "b"); }}, new HashMap<String, Map<String,String>>());
+    config1.setTag("version1");
+
+    Config config2 = configFactory.createNew(c1, "core-site",
+      new HashMap<String, String>() {{ put("x", "y"); }}, new HashMap<String, Map<String,String>>());
+    config2.setTag("version2");
+
+    c1.addConfig(config1);
+    c1.addConfig(config2);
+
+    Set<Config> configs = new HashSet<Config>();
+    configs.add(config1);
+    configs.add(config2);
+
+    c1.addDesiredConfig("admin", configs);
+    List<ServiceConfigVersionResponse> serviceConfigVersions =
+      c1.getServiceConfigVersions();
+    Assert.assertNotNull(serviceConfigVersions);
+    // Single serviceConfigVersion for multiple configs
+    Assert.assertEquals(1, serviceConfigVersions.size());
+    Assert.assertEquals(Long.valueOf(1), serviceConfigVersions.get(0).getVersion());
+    Assert.assertEquals(2, c1.getDesiredConfigs().size());
+    Assert.assertEquals("version1", c1.getDesiredConfigByType("hdfs-site").getTag());
+    Assert.assertEquals("version2", c1.getDesiredConfigByType("core-site").getTag());
+
+    Map<String, ServiceConfigVersionResponse> activeServiceConfigVersions =
+      c1.getActiveServiceConfigVersions();
+    Assert.assertEquals(1, activeServiceConfigVersions.size());
+
+
+  }
 }
\ No newline at end of file
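
The new testSingleServiceVersionForMultipleConfigs above covers the other half of the API change: Cluster.addDesiredConfig(user, Set<Config>) applies several config types in one call and records them under a single service config version. A condensed sketch of that flow, with c1, config1 (hdfs-site) and config2 (core-site) as in the test fixture; needs java.util.HashSet and java.util.Set:

    // Sketch only, mirroring the new test.
    Set<Config> configs = new HashSet<Config>();
    configs.add(config1);   // hdfs-site, tag "version1"
    configs.add(config2);   // core-site, tag "version2"

    c1.addDesiredConfig("admin", configs);

    // Both desired configs are grouped under one ServiceConfigVersion.
    Assert.assertEquals(1, c1.getServiceConfigVersions().size());
    Assert.assertEquals(2, c1.getDesiredConfigs().size());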

http://git-wip-us.apache.org/repos/asf/ambari/blob/2cceee2e/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java
index e794e01..2baa5c5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java
@@ -21,6 +21,7 @@ package org.apache.ambari.server.state.cluster;
 import static org.junit.Assert.fail;
 
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -330,7 +331,7 @@ public class ClustersTest {
     config2.persist();
     
     // cluster desired config
-    cluster.addDesiredConfig("_test", config1);
+    cluster.addDesiredConfig("_test", Collections.singleton(config1));
 
     clusters.addHost(h1);
     clusters.addHost(h2);

http://git-wip-us.apache.org/repos/asf/ambari/blob/2cceee2e/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
index c0bdaa2..5d85b0d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
@@ -18,6 +18,7 @@
 
 package org.apache.ambari.server.state.svccomphost;
 
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
@@ -1041,7 +1042,7 @@ public class ServiceComponentHostTest {
     config.setTag(tag);
     config.persist();
     cluster.addConfig(config);
-    cluster.addDesiredConfig("user", config);
+    cluster.addDesiredConfig("user", Collections.singleton(config));
   }
   
   @Test

http://git-wip-us.apache.org/repos/asf/ambari/blob/2cceee2e/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalogTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalogTest.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalogTest.java
index f8d16c8..b0c8fd9 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalogTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalogTest.java
@@ -147,7 +147,7 @@ public class UpgradeCatalogTest {
     cr.setVersionTag("version1");
     cr.setProperties(properties);
 
-    cl.setDesiredConfig(cr);
+    cl.setDesiredConfig(Collections.singletonList(cr));
 
     controller.updateClusters(new HashSet<ClusterRequest>() {{ add(cl); }}, null);
 


[14/35] git commit: AMBARI-7014 FE: Add service wizard not sending configs of added service for validations

Posted by jo...@apache.org.
AMBARI-7014 FE: Add service wizard not sending configs of added service for validations


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/db83ea24
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/db83ea24
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/db83ea24

Branch: refs/heads/branch-alerts-dev
Commit: db83ea24ce47d84895c699137c6c2a9ade2f3577
Parents: 40f1c4e
Author: aBabiichuk <ab...@cybervisiontech.com>
Authored: Tue Aug 26 16:49:06 2014 +0300
Committer: aBabiichuk <ab...@cybervisiontech.com>
Committed: Tue Aug 26 16:49:06 2014 +0300

----------------------------------------------------------------------
 ambari-web/app/mixins/common/serverValidator.js | 2 +-
 ambari-web/app/models/stack_service.js          | 3 ---
 ambari-web/app/utils/config.js                  | 2 +-
 ambari-web/test/utils/blueprint_test.js         | 1 -
 4 files changed, 2 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/db83ea24/ambari-web/app/mixins/common/serverValidator.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/common/serverValidator.js b/ambari-web/app/mixins/common/serverValidator.js
index 5140224..462ff62 100644
--- a/ambari-web/app/mixins/common/serverValidator.js
+++ b/ambari-web/app/mixins/common/serverValidator.js
@@ -78,7 +78,7 @@ App.ServerValidatorMixin = Em.Mixin.create({
     return this.get('content.serviceName')
         ? [App.StackService.find(this.get('content.serviceName'))]
         : App.StackService.find().filter(function(s){
-          return s.get('allowServerValidator') && (s.get('isSelected') || s.get('isInsalled'))
+          return (s.get('isSelected') || s.get('isInstalled'))
         }).concat(require("data/service_configs"));
   }.property('content.serviceName'),
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/db83ea24/ambari-web/app/models/stack_service.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/stack_service.js b/ambari-web/app/models/stack_service.js
index 744083b..e19e27f 100644
--- a/ambari-web/app/models/stack_service.js
+++ b/ambari-web/app/models/stack_service.js
@@ -171,9 +171,6 @@ App.StackService = DS.Model.extend(App.ServiceModelMixin, {
     return defaultConfigsHandler && defaultConfigsHandler.configsValidator;
   }.property('serviceName'),
 
-  allowServerValidator: function() {
-    return ["YARN", "STORM", "MAPREDUCE2", "HIVE", "TEZ"].contains(this.get('serviceName'));
-  }.property('serviceName'),
   /**
    * configCategories are fetched from  App.StackService.configCategories.
    * Also configCategories that does not match any serviceComponent of a service and not included in the permissible default pattern are omitted

http://git-wip-us.apache.org/repos/asf/ambari/blob/db83ea24/ambari-web/app/utils/config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/config.js b/ambari-web/app/utils/config.js
index 62db148..03bd079 100644
--- a/ambari-web/app/utils/config.js
+++ b/ambari-web/app/utils/config.js
@@ -691,7 +691,7 @@ App.config = Em.Object.create({
       // Use calculated default values for some configs
       var recommendedDefaults = {};
       if (App.get('supports.serverRecommendValidate')) {
-        if (!storedConfigs && service.get('configTypes') && service.get('allowServerValidator')) {
+        if (!storedConfigs && service.get('configTypes')) {
           Object.keys(service.get('configTypes')).forEach(function (type) {
             if (!recommended || !recommended[type]) {
               return;

http://git-wip-us.apache.org/repos/asf/ambari/blob/db83ea24/ambari-web/test/utils/blueprint_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/utils/blueprint_test.js b/ambari-web/test/utils/blueprint_test.js
index 1f8d803..d615896 100644
--- a/ambari-web/test/utils/blueprint_test.js
+++ b/ambari-web/test/utils/blueprint_test.js
@@ -287,7 +287,6 @@ describe('utils/blueprint', function() {
               "yarn-site": {},
               "yarn-env": {}
             },
-            allowServerValidator: true,
             isInstalled: true
           })
         ],


[02/35] git commit: AMBARI-7003. Views : Add description property to view version.

Posted by jo...@apache.org.
AMBARI-7003. Views : Add description property to view version.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2fd25004
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2fd25004
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2fd25004

Branch: refs/heads/branch-alerts-dev
Commit: 2fd25004b0c783b4838539b1a39862830b282b02
Parents: fefc129
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Mon Aug 25 10:27:25 2014 -0700
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Mon Aug 25 11:02:15 2014 -0700

----------------------------------------------------------------------
 .../stackadvisor/StackAdvisorHelper.java.orig   | 116 -------------------
 .../internal/ViewVersionResourceProvider.java   |   3 +
 .../ambari/server/orm/entities/ViewEntity.java  |  39 +++++--
 .../server/upgrade/UpgradeCatalog170.java       |   3 +
 .../server/view/configuration/ViewConfig.java   |  14 +++
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   2 +-
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |   2 +-
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |   2 +-
 .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql     |   2 +-
 .../view/configuration/ViewConfigTest.java      |   7 ++
 .../org/apache/ambari/view/ViewDefinition.java  |   7 ++
 11 files changed, 69 insertions(+), 128 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2fd25004/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelper.java.orig
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelper.java.orig b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelper.java.orig
deleted file mode 100644
index 213b0f0..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelper.java.orig
+++ /dev/null
@@ -1,116 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.api.services.stackadvisor;
-
-import java.io.File;
-import java.io.IOException;
-
-import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest.StackAdvisorRequestType;
-import org.apache.ambari.server.api.services.stackadvisor.commands.GetComponentLayoutRecommnedationCommand;
-import org.apache.ambari.server.api.services.stackadvisor.commands.GetComponentLayoutValidationCommand;
-import org.apache.ambari.server.api.services.stackadvisor.commands.StackAdvisorCommand;
-import org.apache.ambari.server.api.services.stackadvisor.recommendations.RecommendationResponse;
-import org.apache.ambari.server.api.services.stackadvisor.validations.ValidationResponse;
-import org.apache.ambari.server.configuration.Configuration;
-
-import com.google.inject.Inject;
-import com.google.inject.Singleton;
-
-@Singleton
-public class StackAdvisorHelper {
-
-  private File recommendationsDir;
-  private String stackAdvisorScript;
-
-  /* Monotonically increasing requestid */
-  private int requestId = 0;
-  private StackAdvisorRunner saRunner;
-
-  @Inject
-  public StackAdvisorHelper(Configuration conf, StackAdvisorRunner saRunner) throws IOException {
-    this.recommendationsDir = conf.getRecommendationsDir();
-    this.stackAdvisorScript = conf.getStackAdvisorScript();
-    this.saRunner = saRunner;
-  }
-
-  /**
-   * Returns validation (component-layout or configurations) result for the
-   * request.
-   * 
-   * @param validationRequest the validation request
-   * @return {@link ValidationResponse} instance
-   * @throws StackAdvisorException in case of stack advisor script errors
-   */
-  public synchronized ValidationResponse validate(StackAdvisorRequest request)
-      throws StackAdvisorException {
-    requestId += 1;
-
-    StackAdvisorCommand<ValidationResponse> command = createValidationCommand(request
-        .getRequestType());
-
-    return command.invoke(request);
-  }
-
-  StackAdvisorCommand<ValidationResponse> createValidationCommand(
-      StackAdvisorRequestType requestType) throws StackAdvisorException {
-    StackAdvisorCommand<ValidationResponse> command;
-    if (requestType == StackAdvisorRequestType.HOST_GROUPS) {
-      command = new GetComponentLayoutValidationCommand(recommendationsDir, stackAdvisorScript,
-          requestId, saRunner);
-    } else {
-      throw new StackAdvisorException(String.format("Unsupported request type, type=%s",
-          requestType));
-    }
-
-    return command;
-  }
-
-  /**
-   * Returns recommendation (component-layout or configurations) based on the
-   * request.
-   * 
-   * @param request the recommendation request
-   * @return {@link RecommendationResponse} instance
-   * @throws StackAdvisorException in case of stack advisor script errors
-   */
-  public synchronized RecommendationResponse recommend(StackAdvisorRequest request)
-      throws StackAdvisorException {
-    requestId += 1;
-
-    StackAdvisorCommand<RecommendationResponse> command = createRecommendationCommand(request
-        .getRequestType());
-
-    return command.invoke(request);
-  }
-
-  StackAdvisorCommand<RecommendationResponse> createRecommendationCommand(
-      StackAdvisorRequestType requestType) throws StackAdvisorException {
-    StackAdvisorCommand<RecommendationResponse> command;
-    if (requestType == StackAdvisorRequestType.HOST_GROUPS) {
-      command = new GetComponentLayoutRecommnedationCommand(recommendationsDir, stackAdvisorScript,
-          requestId, saRunner);
-    } else {
-      throw new StackAdvisorException(String.format("Unsupported request type, type=%s",
-          requestType));
-    }
-
-    return command;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fd25004/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewVersionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewVersionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewVersionResourceProvider.java
index 5793a1b..d13785f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewVersionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewVersionResourceProvider.java
@@ -47,6 +47,7 @@ public class ViewVersionResourceProvider extends AbstractResourceProvider {
   public static final String VIEW_NAME_PROPERTY_ID      = "ViewVersionInfo/view_name";
   public static final String VIEW_VERSION_PROPERTY_ID   = "ViewVersionInfo/version";
   public static final String LABEL_PROPERTY_ID          = "ViewVersionInfo/label";
+  public static final String DESCRIPTION_PROPERTY_ID    = "ViewVersionInfo/description";
   public static final String VERSION_PROPERTY_ID        = "ViewVersionInfo/version";
   public static final String PARAMETERS_PROPERTY_ID     = "ViewVersionInfo/parameters";
   public static final String ARCHIVE_PROPERTY_ID        = "ViewVersionInfo/archive";
@@ -69,6 +70,7 @@ public class ViewVersionResourceProvider extends AbstractResourceProvider {
     propertyIds.add(VIEW_NAME_PROPERTY_ID);
     propertyIds.add(VIEW_VERSION_PROPERTY_ID);
     propertyIds.add(LABEL_PROPERTY_ID);
+    propertyIds.add(DESCRIPTION_PROPERTY_ID);
     propertyIds.add(VERSION_PROPERTY_ID);
     propertyIds.add(PARAMETERS_PROPERTY_ID);
     propertyIds.add(ARCHIVE_PROPERTY_ID);
@@ -121,6 +123,7 @@ public class ViewVersionResourceProvider extends AbstractResourceProvider {
             setResourceProperty(resource, VIEW_NAME_PROPERTY_ID, viewDefinition.getCommonName(), requestedIds);
             setResourceProperty(resource, VIEW_VERSION_PROPERTY_ID, viewDefinition.getVersion(), requestedIds);
             setResourceProperty(resource, LABEL_PROPERTY_ID, viewDefinition.getLabel(), requestedIds);
+            setResourceProperty(resource, DESCRIPTION_PROPERTY_ID, viewDefinition.getDescription(), requestedIds);
             setResourceProperty(resource, VERSION_PROPERTY_ID, viewDefinition.getVersion(), requestedIds);
             setResourceProperty(resource, PARAMETERS_PROPERTY_ID,
                 viewDefinition.getConfiguration().getParameters(), requestedIds);

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fd25004/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewEntity.java
index da6e2d2..998d2c1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewEntity.java
@@ -39,6 +39,7 @@ import javax.persistence.NamedQuery;
 import javax.persistence.OneToMany;
 import javax.persistence.Table;
 import javax.persistence.Transient;
+
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -69,6 +70,13 @@ public class ViewEntity implements ViewDefinition {
   private String label;
 
   /**
+   * The view description.
+   */
+  @Column
+  @Basic
+  private String description;
+
+  /**
    * The icon path.
    */
   @Column
@@ -226,13 +234,14 @@ public class ViewEntity implements ViewDefinition {
 
     String version = configuration.getVersion();
 
-    this.name    = getViewName(configuration.getName(), version);
-    this.label   = configuration.getLabel();
-    this.version = version;
+    this.name        = getViewName(configuration.getName(), version);
+    this.label       = configuration.getLabel();
+    this.description = configuration.getDescription();
+    this.version     = version;
 
-    this.mask    = configuration.getMasker();
-    this.icon    = configuration.getIcon();
-    this.icon64  = configuration.getIcon64();
+    this.mask        = configuration.getMasker();
+    this.icon        = configuration.getIcon();
+    this.icon64      = configuration.getIcon64();
 
     this.externalResourceType =
         new Resource.Type(getQualifiedResourceTypeName(ResourceConfig.EXTERNAL_RESOURCE_PLURAL_NAME));
@@ -252,6 +261,11 @@ public class ViewEntity implements ViewDefinition {
   }
 
   @Override
+  public String getDescription() {
+    return description;
+  }
+
+  @Override
   public String getVersion() {
     return version;
   }
@@ -294,16 +308,25 @@ public class ViewEntity implements ViewDefinition {
   /**
    * Set the view label (display name).
    *
-   * @param label  the view label
+   * @param label the view label
    */
   public void setLabel(String label) {
     this.label = label;
   }
 
   /**
+   * Set the view description.
+   *
+   * @param description the view description
+   */
+  public void setDescription(String description) {
+    this.description = description;
+  }
+
+  /**
    * Set the view version.
    *
-   * @param version  the version
+   * @param version the version
    */
   public void setVersion(String version) {
     this.version = version;

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fd25004/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
index 9f61c3d..eeba932 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
@@ -252,6 +252,9 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
     dbAccessor.addColumn("host_role_command", new DBColumnInfo("error_log",
         String.class, 255, null, true));
 
+    dbAccessor.addColumn("viewmain", new DBColumnInfo("description",
+        String.class, 255, null, true));
+
     addAlertingFrameworkDDL();
 
     //service config versions changes

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fd25004/ambari-server/src/main/java/org/apache/ambari/server/view/configuration/ViewConfig.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/configuration/ViewConfig.java b/ambari-server/src/main/java/org/apache/ambari/server/view/configuration/ViewConfig.java
index ddca446..816c178 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/configuration/ViewConfig.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/configuration/ViewConfig.java
@@ -48,6 +48,11 @@ public class ViewConfig {
   private String label;
 
   /**
+   * The view description.
+   */
+  private String description;
+
+  /**
    * The view version.
    */
   private String version;
@@ -133,6 +138,15 @@ public class ViewConfig {
   }
 
   /**
+   * Get the view description.
+   *
+   * @return the view description
+   */
+  public String getDescription() {
+    return description;
+  }
+
+  /**
    * Get the view version.
    *
    * @return the version

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fd25004/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 0a372b5..29fa041 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -65,7 +65,7 @@ CREATE TABLE hostgroup (blueprint_name VARCHAR(255) NOT NULL, name VARCHAR(255)
 CREATE TABLE hostgroup_component (blueprint_name VARCHAR(255) NOT NULL, hostgroup_name VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, PRIMARY KEY(blueprint_name, hostgroup_name, name));
 CREATE TABLE blueprint_configuration (blueprint_name VARCHAR(255) NOT NULL, type_name VARCHAR(255) NOT NULL, config_data VARCHAR(32000) NOT NULL, config_attributes VARCHAR(32000), PRIMARY KEY(blueprint_name, type_name));
 CREATE TABLE hostgroup_configuration (blueprint_name VARCHAR(255) NOT NULL, hostgroup_name VARCHAR(255) NOT NULL, type_name VARCHAR(255) NOT NULL, config_data TEXT NOT NULL, config_attributes TEXT, PRIMARY KEY(blueprint_name, hostgroup_name, type_name));
-CREATE TABLE viewmain (view_name VARCHAR(255) NOT NULL, label VARCHAR(255), version VARCHAR(255), resource_type_id INTEGER NOT NULL, icon VARCHAR(255), icon64 VARCHAR(255), archive VARCHAR(255), mask VARCHAR(255), PRIMARY KEY(view_name));
+CREATE TABLE viewmain (view_name VARCHAR(255) NOT NULL, label VARCHAR(255), description VARCHAR(255), version VARCHAR(255), resource_type_id INTEGER NOT NULL, icon VARCHAR(255), icon64 VARCHAR(255), archive VARCHAR(255), mask VARCHAR(255), PRIMARY KEY(view_name));
 CREATE TABLE viewinstancedata (view_instance_id BIGINT, view_name VARCHAR(255) NOT NULL, view_instance_name VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, user_name VARCHAR(255) NOT NULL, value VARCHAR(2000) NOT NULL, PRIMARY KEY(VIEW_INSTANCE_ID, NAME, USER_NAME));
 CREATE TABLE viewinstance (view_instance_id BIGINT, resource_id BIGINT NOT NULL, view_name VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, label VARCHAR(255), description VARCHAR(255), visible CHAR(1), icon VARCHAR(255), icon64 VARCHAR(255), xml_driven CHAR(1), PRIMARY KEY(view_instance_id));
 CREATE TABLE viewinstanceproperty (view_name VARCHAR(255) NOT NULL, view_instance_name VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, value VARCHAR(2000) NOT NULL, PRIMARY KEY(view_name, view_instance_name, name));

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fd25004/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 817d1e9..457404d 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -56,7 +56,7 @@ CREATE TABLE hostgroup (blueprint_name VARCHAR2(255) NOT NULL, name VARCHAR2(255
 CREATE TABLE hostgroup_component (blueprint_name VARCHAR2(255) NOT NULL, hostgroup_name VARCHAR2(255) NOT NULL, name VARCHAR2(255) NOT NULL, PRIMARY KEY(blueprint_name, hostgroup_name, name));
 CREATE TABLE blueprint_configuration (blueprint_name VARCHAR2(255) NOT NULL, type_name VARCHAR2(255) NOT NULL, config_data CLOB NOT NULL, config_attributes CLOB, PRIMARY KEY(blueprint_name, type_name));
 CREATE TABLE hostgroup_configuration (blueprint_name VARCHAR2(255) NOT NULL, hostgroup_name VARCHAR2(255) NOT NULL, type_name VARCHAR2(255) NOT NULL, config_data CLOB NOT NULL, config_attributes CLOB, PRIMARY KEY(blueprint_name, hostgroup_name, type_name));
-CREATE TABLE viewmain (view_name VARCHAR(255) NOT NULL, label VARCHAR(255), version VARCHAR(255), resource_type_id NUMBER(10) NOT NULL, icon VARCHAR(255), icon64 VARCHAR(255), archive VARCHAR(255), mask VARCHAR(255), PRIMARY KEY(view_name));
+CREATE TABLE viewmain (view_name VARCHAR(255) NOT NULL, label VARCHAR(255), description VARCHAR(255), version VARCHAR(255), resource_type_id NUMBER(10) NOT NULL, icon VARCHAR(255), icon64 VARCHAR(255), archive VARCHAR(255), mask VARCHAR(255), PRIMARY KEY(view_name));
 CREATE TABLE viewinstancedata (view_instance_id NUMBER(19), view_name VARCHAR(255) NOT NULL, view_instance_name VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, user_name VARCHAR(255) NOT NULL, value VARCHAR(2000) NOT NULL, PRIMARY KEY(view_instance_id, name, user_name));
 CREATE TABLE viewinstance (view_instance_id NUMBER(19), resource_id NUMBER(19) NOT NULL, view_name VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, label VARCHAR(255), description VARCHAR(255), visible CHAR(1), icon VARCHAR(255), icon64 VARCHAR(255), xml_driven CHAR(1), PRIMARY KEY(view_instance_id));
 CREATE TABLE viewinstanceproperty (view_name VARCHAR(255) NOT NULL, view_instance_name VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, value VARCHAR(2000) NOT NULL, PRIMARY KEY(view_name, view_instance_name, name));

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fd25004/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index 1ea4a55..b58fd21 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -91,7 +91,7 @@ CREATE TABLE hostgroup_component (blueprint_name VARCHAR(255) NOT NULL, hostgrou
 CREATE TABLE blueprint_configuration (blueprint_name varchar(255) NOT NULL, type_name varchar(255) NOT NULL, config_data varchar(32000) NOT NULL , config_attributes varchar(32000), PRIMARY KEY(blueprint_name, type_name));
 CREATE TABLE hostgroup_configuration (blueprint_name VARCHAR(255) NOT NULL, hostgroup_name VARCHAR(255) NOT NULL, type_name VARCHAR(255) NOT NULL, config_data TEXT NOT NULL, config_attributes varchar(32000), PRIMARY KEY(blueprint_name, hostgroup_name, type_name));
 
-CREATE TABLE viewmain (view_name VARCHAR(255) NOT NULL, label VARCHAR(255), version VARCHAR(255), resource_type_id INTEGER NOT NULL, icon VARCHAR(255), icon64 VARCHAR(255), archive VARCHAR(255), mask VARCHAR(255), PRIMARY KEY(view_name));
+CREATE TABLE viewmain (view_name VARCHAR(255) NOT NULL, label VARCHAR(255), description VARCHAR(255), version VARCHAR(255), resource_type_id INTEGER NOT NULL, icon VARCHAR(255), icon64 VARCHAR(255), archive VARCHAR(255), mask VARCHAR(255), PRIMARY KEY(view_name));
 CREATE TABLE viewinstancedata (view_instance_id BIGINT, view_name VARCHAR(255) NOT NULL, view_instance_name VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, user_name VARCHAR(255) NOT NULL, value VARCHAR(2000) NOT NULL, PRIMARY KEY(view_instance_id, name, user_name));
 CREATE TABLE viewinstance (view_instance_id BIGINT, resource_id BIGINT NOT NULL, view_name VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, label VARCHAR(255), description VARCHAR(255), visible CHAR(1), icon VARCHAR(255), icon64 VARCHAR(255), xml_driven CHAR(1), PRIMARY KEY(view_instance_id));
 CREATE TABLE viewinstanceproperty (view_name VARCHAR(255) NOT NULL, view_instance_name VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, value VARCHAR(2000) NOT NULL, PRIMARY KEY(view_name, view_instance_name, name));

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fd25004/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
index ef8a24d..664bbe9 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
@@ -141,7 +141,7 @@ GRANT ALL PRIVILEGES ON TABLE ambari.hostgroup_component TO :username;
 GRANT ALL PRIVILEGES ON TABLE ambari.blueprint_configuration TO :username;
 GRANT ALL PRIVILEGES ON TABLE ambari.hostgroup_configuration TO :username;
 
-CREATE TABLE ambari.viewmain (view_name VARCHAR(255) NOT NULL, label VARCHAR(255), version VARCHAR(255), resource_type_id INTEGER NOT NULL, icon VARCHAR(255), icon64 VARCHAR(255), archive VARCHAR(255), mask VARCHAR(255), PRIMARY KEY(view_name));
+CREATE TABLE ambari.viewmain (view_name VARCHAR(255) NOT NULL, label VARCHAR(255), description VARCHAR(255), version VARCHAR(255), resource_type_id INTEGER NOT NULL, icon VARCHAR(255), icon64 VARCHAR(255), archive VARCHAR(255), mask VARCHAR(255), PRIMARY KEY(view_name));
 CREATE TABLE ambari.viewinstancedata (view_instance_id BIGINT, view_name VARCHAR(255) NOT NULL, view_instance_name VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, user_name VARCHAR(255) NOT NULL, value VARCHAR(2000) NOT NULL, PRIMARY KEY(view_instance_id, name, user_name));
 CREATE TABLE ambari.viewinstance (view_instance_id BIGINT, resource_id BIGINT NOT NULL, view_name VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, label VARCHAR(255), description VARCHAR(255), visible CHAR(1), icon VARCHAR(255), icon64 VARCHAR(255), xml_driven CHAR(1), PRIMARY KEY(view_instance_id));
 CREATE TABLE ambari.viewinstanceproperty (view_name VARCHAR(255) NOT NULL, view_instance_name VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, value VARCHAR(2000) NOT NULL, PRIMARY KEY(view_name, view_instance_name, name));

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fd25004/ambari-server/src/test/java/org/apache/ambari/server/view/configuration/ViewConfigTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/view/configuration/ViewConfigTest.java b/ambari-server/src/test/java/org/apache/ambari/server/view/configuration/ViewConfigTest.java
index ba40027..2ed365e 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/view/configuration/ViewConfigTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/view/configuration/ViewConfigTest.java
@@ -45,6 +45,7 @@ public class ViewConfigTest {
   private static String xml = "<view>\n" +
       "    <name>MY_VIEW</name>\n" +
       "    <label>My View!</label>\n" +
+      "    <description>Description</description>" +
       "    <version>1.0.0</version>\n" +
       "    <icon64>/this/is/the/icon/url/icon64.png</icon64>\n" +
       "    <icon>/this/is/the/icon/url/icon.png</icon>\n" +
@@ -129,6 +130,12 @@ public class ViewConfigTest {
   }
 
   @Test
+  public void testGetDescription() throws Exception {
+    ViewConfig config = getConfig();
+    Assert.assertEquals("Description", config.getDescription());
+  }
+
+  @Test
   public void testGetVersion() throws Exception {
     ViewConfig config = getConfig();
     Assert.assertEquals("1.0.0", config.getVersion());

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fd25004/ambari-views/src/main/java/org/apache/ambari/view/ViewDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-views/src/main/java/org/apache/ambari/view/ViewDefinition.java b/ambari-views/src/main/java/org/apache/ambari/view/ViewDefinition.java
index e519526..b8e7444 100644
--- a/ambari-views/src/main/java/org/apache/ambari/view/ViewDefinition.java
+++ b/ambari-views/src/main/java/org/apache/ambari/view/ViewDefinition.java
@@ -38,6 +38,13 @@ public interface ViewDefinition {
   public String getLabel();
 
   /**
+   * Get the view description.
+   *
+   * @return the description
+   */
+  public String getDescription();
+
+  /**
    * Get the view version.
    *
    * @return the version
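
To see how the new element flows end to end: view.xml gains a <description> child, ViewConfig exposes it via getDescription(), ViewEntity copies it into the new viewmain.description column, and ViewVersionResourceProvider surfaces it as ViewVersionInfo/description. The following is a self-contained, illustrative stand-in only; ViewDescriptionDemo and ViewXml are hypothetical names and assume nothing about Ambari's actual ViewConfig beyond what the diff and ViewConfigTest show (the real class is far richer):

    import java.io.StringReader;
    import javax.xml.bind.JAXB;
    import javax.xml.bind.annotation.XmlRootElement;

    public class ViewDescriptionDemo {

      // Minimal stand-in for a view descriptor with the new <description> element.
      @XmlRootElement(name = "view")
      public static class ViewXml {
        public String name;
        public String label;
        public String description;  // element introduced by this patch
        public String version;
      }

      public static void main(String[] args) {
        String xml = "<view>"
            + "<name>MY_VIEW</name>"
            + "<label>My View!</label>"
            + "<description>Description</description>"
            + "<version>1.0.0</version>"
            + "</view>";
        ViewXml view = JAXB.unmarshal(new StringReader(xml), ViewXml.class);
        System.out.println(view.description);  // prints: Description
      }
    }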


[06/35] git commit: AMBARI-7008. Configuration validations for STORM service should be removed

Posted by jo...@apache.org.
AMBARI-7008. Configuration validations for STORM service should be removed


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/98660b9e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/98660b9e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/98660b9e

Branch: refs/heads/branch-alerts-dev
Commit: 98660b9e3ee43ce3b2083f010ed55c556980527b
Parents: fb1e0ca
Author: Srimanth Gunturi <sg...@hortonworks.com>
Authored: Mon Aug 25 17:08:48 2014 -0700
Committer: Srimanth Gunturi <sg...@hortonworks.com>
Committed: Mon Aug 25 17:08:48 2014 -0700

----------------------------------------------------------------------
 .../main/resources/stacks/HDP/2.1/services/stack_advisor.py   | 7 -------
 1 file changed, 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/98660b9e/ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py
index 5d7a3bc..e4168d7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py
@@ -82,7 +82,6 @@ class HDP21StackAdvisor(HDP206StackAdvisor):
     validator = super(HDP21StackAdvisor, self).validateServiceConfigurations(serviceName)
     if validator is None:
       return {
-        "STORM": ["storm-site", self.validateStormConfigurations],
         "HIVE": ["hive-site", self.validateHiveConfigurations],
         "TEZ": ["tez-site", self.validateTezConfigurations]
       }.get(serviceName, None)
@@ -95,12 +94,6 @@ class HDP21StackAdvisor(HDP206StackAdvisor):
                         {"config-name": 'hive.auto.convert.join.noconditionaltask.size', "message": self.validatorLessThenDefaultValue(properties, recommendedDefaults, 'hive.auto.convert.join.noconditionaltask.size')} ]
     return self.toConfigurationValidationErrors(validationItems, "hive-site")
 
-  def validateStormConfigurations(self, properties, recommendedDefaults):
-    validationItems = [ {"config-name": 'drpc.childopts', "message": self.validateXmxValue(properties, recommendedDefaults, 'drpc.childopts')},
-                        {"config-name": 'ui.childopts', "message": self.validateXmxValue(properties, recommendedDefaults, 'ui.childopts')},
-                        {"config-name": 'logviewer.childopts', "message": self.validateXmxValue(properties, recommendedDefaults, 'logviewer.childopts')} ]
-    return self.toConfigurationValidationErrors(validationItems, "storm-site")
-
   def validateTezConfigurations(self, properties, recommendedDefaults):
     validationItems = [ {"config-name": 'tez.am.resource.memory.mb', "message": self.validatorLessThenDefaultValue(properties, recommendedDefaults, 'tez.am.resource.memory.mb')},
                         {"config-name": 'tez.am.java.opts', "message": self.validateXmxValue(properties, recommendedDefaults, 'tez.am.java.opts')} ]

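With the "STORM" entry removed from the dispatch map above, validateServiceConfigurations("STORM") now returns whatever the parent HDP206StackAdvisor supplies, or None via the trailing .get(serviceName, None), so drpc.childopts, ui.childopts and logviewer.childopts are no longer Xmx-validated for Storm in the HDP 2.1 advisor.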

[15/35] AMBARI-6061. OutOfMemoryError during host checks on 2k nodes cluster (dlysnichenko)

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
index 13cb231..1cfba46 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
@@ -134,6 +134,8 @@ public class UpgradeCatalog170Test {
     Capture<DBAccessor.DBColumnInfo> clusterConfigAttributesColumnCapture = new Capture<DBAccessor.DBColumnInfo>();
     Capture<DBAccessor.DBColumnInfo> maskColumnCapture = new Capture<DBAccessor.DBColumnInfo>();
     Capture<DBAccessor.DBColumnInfo> maskedColumnCapture = new Capture<DBAccessor.DBColumnInfo>();
+    Capture<DBAccessor.DBColumnInfo> stageCommandParamsColumnCapture = new Capture<DBAccessor.DBColumnInfo>();
+    Capture<DBAccessor.DBColumnInfo> stageHostParamsColumnCapture = new Capture<DBAccessor.DBColumnInfo>();
     Capture<List<DBAccessor.DBColumnInfo>> alertDefinitionColumnCapture = new Capture<List<DBAccessor.DBColumnInfo>>();
     Capture<List<DBAccessor.DBColumnInfo>> alertHistoryColumnCapture = new Capture<List<DBAccessor.DBColumnInfo>>();
     Capture<List<DBAccessor.DBColumnInfo>> alertCurrentColumnCapture = new Capture<List<DBAccessor.DBColumnInfo>>();
@@ -148,6 +150,7 @@ public class UpgradeCatalog170Test {
     setViewExpectations(dbAccessor, maskColumnCapture);
     setViewParameterExpectations(dbAccessor, maskedColumnCapture);
     setClusterConfigExpectations(dbAccessor, clusterConfigAttributesColumnCapture);
+    setStageExpectations(dbAccessor, stageCommandParamsColumnCapture, stageHostParamsColumnCapture);
 
     dbAccessor.createTable(eq("alert_definition"),
         capture(alertDefinitionColumnCapture), eq("definition_id"));
@@ -200,6 +203,7 @@ public class UpgradeCatalog170Test {
     assertClusterConfigColumns(clusterConfigAttributesColumnCapture);
     assertViewColumns(maskColumnCapture);
     assertViewParameterColumns(maskedColumnCapture);
+    assertStageColumns(stageCommandParamsColumnCapture, stageHostParamsColumnCapture);
 
     assertEquals(12, alertDefinitionColumnCapture.getValue().size());
     assertEquals(11, alertHistoryColumnCapture.getValue().size());
@@ -424,6 +428,17 @@ public class UpgradeCatalog170Test {
     dbAccessor.addColumn(eq("clusterconfig"),
         capture(clusterConfigAttributesColumnCapture));
   }
+ 
+  private void setStageExpectations(DBAccessor dbAccessor,
+                                    Capture<DBAccessor.DBColumnInfo> stageCommandParamsColumnCapture,
+                                    Capture<DBAccessor.DBColumnInfo> stageHostParamsColumnCapture)
+    throws SQLException {
+    dbAccessor.addColumn(eq("stage"),
+      capture(stageCommandParamsColumnCapture));
+
+    dbAccessor.addColumn(eq("stage"),
+      capture(stageHostParamsColumnCapture));
+  }
 
   @Test
   public void testGetSourceVersion() {
@@ -465,4 +480,19 @@ public class UpgradeCatalog170Test {
     assertNull(column.getDefaultValue());
     assertTrue(column.isNullable());
   }
+
+  private void assertStageColumns(Capture<DBAccessor.DBColumnInfo> stageCommandParamsColumnCapture,
+                                  Capture<DBAccessor.DBColumnInfo> stageHostParamsColumnCapture) {
+    DBAccessor.DBColumnInfo column = stageCommandParamsColumnCapture.getValue();
+    assertEquals("command_params", column.getName());
+    assertEquals(byte[].class, column.getType());
+    assertEquals(null, column.getDefaultValue());
+    assertTrue(column.isNullable());
+
+    column = stageHostParamsColumnCapture.getValue();
+    assertEquals("host_params", column.getName());
+    assertEquals(byte[].class, column.getType());
+    assertEquals(null, column.getDefaultValue());
+    assertTrue(column.isNullable());
+  }
 }
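
A rough sketch of the UpgradeCatalog170 step that these captures and assertStageColumns() verify, assuming the DBColumnInfo(name, type, length, defaultValue, nullable) constructor used elsewhere by the upgrade catalogs (the production code itself is not part of this mail):

  // hypothetical upgrade step: add nullable blob columns to the stage table
  dbAccessor.addColumn("stage",
      new DBAccessor.DBColumnInfo("command_params", byte[].class, null, null, true));
  dbAccessor.addColumn("stage",
      new DBAccessor.DBColumnInfo("host_params", byte[].class, null, null, true));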

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/test/java/org/apache/ambari/server/utils/TestStageUtils.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/utils/TestStageUtils.java b/ambari-server/src/test/java/org/apache/ambari/server/utils/TestStageUtils.java
index 6e587f1..988c67b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/utils/TestStageUtils.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/utils/TestStageUtils.java
@@ -20,6 +20,8 @@ package org.apache.ambari.server.utils;
 import static org.easymock.EasyMock.expect;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.powermock.api.easymock.PowerMock.mockStaticPartial;
+import static org.powermock.api.easymock.PowerMock.replayAll;
 
 import java.io.IOException;
 import java.net.UnknownHostException;
@@ -42,8 +44,6 @@ import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
 import org.apache.ambari.server.actionmanager.Stage;
 import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
-import org.apache.ambari.server.configuration.Configuration;
-import org.apache.ambari.server.controller.HostsMap;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.state.Cluster;
@@ -63,9 +63,6 @@ import org.junit.runner.RunWith;
 import org.powermock.core.classloader.annotations.PowerMockIgnore;
 import org.powermock.core.classloader.annotations.PrepareForTest;
 import org.powermock.modules.junit4.PowerMockRunner;
-import static org.powermock.api.easymock.PowerMock.replayAll;
-import java.net.InetAddress;
-import static org.powermock.api.easymock.PowerMock.*;
 
 import com.google.common.collect.ContiguousSet;
 import com.google.common.collect.DiscreteDomain;
@@ -124,7 +121,7 @@ public class TestStageUtils {
   @Test
   @Ignore
   public void testGetATestStage() {
-    Stage s = StageUtils.getATestStage(1, 2, "host2");
+    Stage s = StageUtils.getATestStage(1, 2, "host2", "", "hostParamsStage");
     String hostname = s.getHosts().get(0);
     List<ExecutionCommandWrapper> wrappers = s.getExecutionCommands(hostname);
     for (ExecutionCommandWrapper wrapper : wrappers) {
@@ -137,7 +134,7 @@ public class TestStageUtils {
   @Test
   @Ignore
   public void testJaxbToString() throws Exception {
-    Stage s = StageUtils.getATestStage(1, 2, "host1");
+    Stage s = StageUtils.getATestStage(1, 2, "host1", "", "hostParamsStage");
     String hostname = s.getHosts().get(0);
     List<ExecutionCommandWrapper> wrappers = s.getExecutionCommands(hostname);
     for (ExecutionCommandWrapper wrapper : wrappers) {
@@ -150,7 +147,7 @@ public class TestStageUtils {
   @Ignore
   public void testJasonToExecutionCommand() throws JsonGenerationException,
       JsonMappingException, JAXBException, IOException {
-    Stage s = StageUtils.getATestStage(1, 2, "host1", "clusterHostInfo");
+    Stage s = StageUtils.getATestStage(1, 2, "host1", "clusterHostInfo", "hostParamsStage");
     ExecutionCommand cmd = s.getExecutionCommands("host1").get(0).getExecutionCommand();    
     HashMap<String, Map<String,String>> configTags = new HashMap<String, Map<String,String>>();
     Map<String, String> globalTag = new HashMap<String, String>();

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-web/app/controllers/wizard/step3_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step3_controller.js b/ambari-web/app/controllers/wizard/step3_controller.js
index 88a328e..3510150 100644
--- a/ambari-web/app/controllers/wizard/step3_controller.js
+++ b/ambari-web/app/controllers/wizard/step3_controller.js
@@ -833,7 +833,6 @@ App.WizardStep3Controller = Em.Controller.extend({
       "parameters": {
         "check_execute_list": "host_resolution_check",
         "jdk_location" : jdk_location,
-        "hosts": hosts,
         "threshold": "20"
       }
     };
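
Dropping "hosts" from the parameters means the host_resolution_check request body no longer embeds the full host list, presumably the source of the OutOfMemoryError on ~2000-node clusters noted in the commit subject. The remaining payload built by the controller is roughly:

  "parameters": {
    "check_execute_list": "host_resolution_check",
    "jdk_location": jdk_location,
    "threshold": "20"
  }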


[32/35] git commit: AMBARI-7032 Jobs View: utilize full width

Posted by jo...@apache.org.
AMBARI-7032 Jobs View: utilize full width


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8d8583bd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8d8583bd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8d8583bd

Branch: refs/heads/branch-alerts-dev
Commit: 8d8583bd5b7191af8e500cad3ba507817ecee011
Parents: 4430ebf
Author: aBabiichuk <ab...@cybervisiontech.com>
Authored: Wed Aug 27 14:34:10 2014 +0300
Committer: aBabiichuk <ab...@cybervisiontech.com>
Committed: Wed Aug 27 14:34:23 2014 +0300

----------------------------------------------------------------------
 contrib/views/jobs/src/main/resources/ui/app/styles/main.less | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8d8583bd/contrib/views/jobs/src/main/resources/ui/app/styles/main.less
----------------------------------------------------------------------
diff --git a/contrib/views/jobs/src/main/resources/ui/app/styles/main.less b/contrib/views/jobs/src/main/resources/ui/app/styles/main.less
index e648fe0..4c1987f 100644
--- a/contrib/views/jobs/src/main/resources/ui/app/styles/main.less
+++ b/contrib/views/jobs/src/main/resources/ui/app/styles/main.less
@@ -21,13 +21,18 @@
 @import '../../app/bower_components/font-awesome/less/font-awesome';
 
 html {
-  overflow-y: scroll;
+  overflow-y: auto;
 }
 
 a {
   cursor: pointer;
 }
 
+.container,
+.jobs-container {
+  width: 100%;
+}
+
 .jobs-container {
   margin-top: 20px;
 }


[28/35] git commit: AMBARI-7027. Config History: need current version for each config group. (myroslav via mahadev)

Posted by jo...@apache.org.
AMBARI-7027. Config History: need current version for each config group. (myroslav via mahadev)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a8e6736a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a8e6736a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a8e6736a

Branch: refs/heads/branch-alerts-dev
Commit: a8e6736ac536f08e86503d037446a4d793db1029
Parents: 0e9c744
Author: Mahadev Konar <ma...@apache.org>
Authored: Tue Aug 26 17:30:30 2014 -0700
Committer: Mahadev Konar <ma...@apache.org>
Committed: Tue Aug 26 17:30:30 2014 -0700

----------------------------------------------------------------------
 .../ServiceConfigVersionResourceDefinition.java | 10 +--
 .../AmbariManagementControllerImpl.java         | 16 ++++-
 .../server/controller/ClusterRequest.java       | 12 ++--
 .../server/controller/ClusterResponse.java      |  9 ++-
 .../server/controller/ConfigGroupRequest.java   |  9 +++
 .../controller/ServiceConfigVersionRequest.java | 23 +++++++
 .../ServiceConfigVersionResponse.java           | 13 +++-
 .../internal/ClusterResourceProvider.java       | 40 +++++++-----
 .../internal/ConfigGroupResourceProvider.java   |  4 ++
 .../ServiceConfigVersionResourceProvider.java   | 12 ++--
 .../ambari/server/orm/dao/ServiceConfigDAO.java | 41 +++++++++++-
 .../org/apache/ambari/server/state/Cluster.java |  6 +-
 .../server/state/cluster/ClusterImpl.java       | 67 ++++++++++++++++----
 .../src/main/resources/properties.json          |  5 +-
 .../server/state/cluster/ClusterTest.java       | 13 ++--
 15 files changed, 214 insertions(+), 66 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e6736a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ServiceConfigVersionResourceDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ServiceConfigVersionResourceDefinition.java b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ServiceConfigVersionResourceDefinition.java
index f5d07f6..ef4108c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ServiceConfigVersionResourceDefinition.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ServiceConfigVersionResourceDefinition.java
@@ -43,12 +43,12 @@ public class ServiceConfigVersionResourceDefinition extends BaseResourceDefiniti
 
   @Override
   public String getPluralName() {
-    return "serviceconfigversions";
+    return "service_config_versions";
   }
 
   @Override
   public String getSingularName() {
-    return "serviceconfigversion";
+    return "service_config_version";
   }
 
   private class HrefProcessor extends BaseHrefPostProcessor {
@@ -66,10 +66,10 @@ public class ServiceConfigVersionResourceDefinition extends BaseResourceDefiniti
         idx = href.indexOf("/", idx) + 1;
 
         String serviceName = (String) resultNode.getObject().getPropertyValue("service_name");
-        Long version = (Long) resultNode.getObject().getPropertyValue("serviceconfigversion");
+        Long version = (Long) resultNode.getObject().getPropertyValue("service_config_version");
         href = href.substring(0, idx)
-            + "configurations/serviceconfigversions?service_name="
-            + serviceName + "&serviceconfigversion=" + version;
+            + "configurations/service_config_versions?service_name="
+            + serviceName + "&service_config_version=" + version;
 
         resultNode.setProperty("href", href);
       } else {
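
With the renamed keys, the post-processed href emitted by HrefProcessor would look roughly like the following (cluster name, service and version are invented for illustration):

  http://ambari-server:8080/api/v1/clusters/c1/configurations/service_config_versions?service_name=HDFS&service_config_version=3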

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e6736a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index 394f6ad..f7f2f2d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -140,6 +140,7 @@ import org.slf4j.LoggerFactory;
 
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
+import com.google.common.collect.Multimaps;
 import com.google.gson.Gson;
 import com.google.inject.Inject;
 import com.google.inject.Injector;
@@ -1156,6 +1157,12 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
       new LinkedList<ConfigurationResponse>();
     ServiceConfigVersionResponse serviceConfigVersionResponse = null;
 
+    if (request.getDesiredConfig() != null && request.getServiceConfigVersionRequest() != null) {
+      String msg = "Unable to set desired configs and rollback at same time, request = " + request.toString();
+      LOG.error(msg);
+      throw new IllegalArgumentException(msg);
+    }
+
     // set or create configuration mapping (and optionally create the map of properties)
     if (null != request.getDesiredConfig()) {
       Set<Config> configs = new HashSet<Config>();
@@ -1259,7 +1266,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
         throw new IllegalArgumentException(msg);
       }
 
-      cluster.setServiceConfigVersion(serviceConfigVersionRequest.getServiceName(),
+      serviceConfigVersionResponse = cluster.setServiceConfigVersion(serviceConfigVersionRequest.getServiceName(),
           serviceConfigVersionRequest.getVersion(), getAuthName(),
           serviceConfigVersionRequest.getNote());
     }
@@ -1272,8 +1279,11 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
       ClusterResponse clusterResponse =
           new ClusterResponse(cluster.getClusterId(), cluster.getClusterName(), null, null, null, null, null);
 
-      clusterResponse.setDesiredServiceConfigVersions(
-          Collections.singletonMap(serviceConfigVersionResponse.getServiceName(), serviceConfigVersionResponse));
+      Map<String, Collection<ServiceConfigVersionResponse>> map =
+        new HashMap<String, Collection<ServiceConfigVersionResponse>>();
+      map.put(serviceConfigVersionResponse.getServiceName(), Collections.singletonList(serviceConfigVersionResponse));
+
+      clusterResponse.setDesiredServiceConfigVersions(map);
 
       //workaround to be able to retrieve update results in resource provider
       //as this method only expected to return request response

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e6736a/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterRequest.java
index caafb25..8bbbd68 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterRequest.java
@@ -148,12 +148,12 @@ public class ClusterRequest {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("{"
-        + " clusterName=" + clusterName
-        + ", clusterId=" + clusterId
-        + ", provisioningState=" + provisioningState
-        + ", stackVersion=" + stackVersion
-        + ", hosts=[");
+    sb.append("{" + " clusterName=").append(clusterName)
+        .append(", clusterId=").append(clusterId)
+        .append(", provisioningState=").append(provisioningState)
+        .append(", stackVersion=").append(stackVersion)
+        .append(", desired_scv=").append(serviceConfigVersionRequest)
+        .append(", hosts=[");
     if (hostNames != null) {
       int i = 0;
       for (String hostName : hostNames) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e6736a/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterResponse.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterResponse.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterResponse.java
index dc83474..2c233e6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterResponse.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterResponse.java
@@ -18,9 +18,12 @@
 
 package org.apache.ambari.server.controller;
 
+import java.util.Collection;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import com.google.common.collect.Multimap;
 import org.apache.ambari.server.state.DesiredConfig;
 import org.apache.ambari.server.state.State;
 import org.apache.ambari.server.state.ClusterHealthReport;
@@ -37,7 +40,7 @@ public class ClusterResponse {
 
   private Map<String, DesiredConfig> desiredConfigs;
 
-  private Map<String, ServiceConfigVersionResponse> desiredServiceConfigVersions;
+  private Map<String, Collection<ServiceConfigVersionResponse>> desiredServiceConfigVersions;
   
   private String provisioningState;
 
@@ -180,11 +183,11 @@ public class ClusterResponse {
     return clusterHealthReport;
   }
 
-  public Map<String, ServiceConfigVersionResponse> getDesiredServiceConfigVersions() {
+  public Map<String, Collection<ServiceConfigVersionResponse>> getDesiredServiceConfigVersions() {
     return desiredServiceConfigVersions;
   }
 
-  public void setDesiredServiceConfigVersions(Map<String, ServiceConfigVersionResponse> desiredServiceConfigVersions) {
+  public void setDesiredServiceConfigVersions(Map<String, Collection<ServiceConfigVersionResponse>> desiredServiceConfigVersions) {
     this.desiredServiceConfigVersions = desiredServiceConfigVersions;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e6736a/ambari-server/src/main/java/org/apache/ambari/server/controller/ConfigGroupRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ConfigGroupRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ConfigGroupRequest.java
index 4c0d3a2..efa1a7e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ConfigGroupRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ConfigGroupRequest.java
@@ -29,6 +29,7 @@ public class ConfigGroupRequest {
   private String groupName;
   private String tag;
   private String description;
+  private String serviceConfigVersionNote;
   private Set<String> hosts;
   private Map<String, Config> configs;
 
@@ -99,4 +100,12 @@ public class ConfigGroupRequest {
   public void setId(Long id) {
     this.id = id;
   }
+
+  public String getServiceConfigVersionNote() {
+    return serviceConfigVersionNote;
+  }
+
+  public void setServiceConfigVersionNote(String serviceConfigVersionNote) {
+    this.serviceConfigVersionNote = serviceConfigVersionNote;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e6736a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceConfigVersionRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceConfigVersionRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceConfigVersionRequest.java
index 3d51468..b32ccdf 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceConfigVersionRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceConfigVersionRequest.java
@@ -26,6 +26,7 @@ public class ServiceConfigVersionRequest {
   private Long applyTime;
   private String userName;
   private String note;
+  private Boolean isCurrent;
 
   public ServiceConfigVersionRequest() {
   }
@@ -94,4 +95,26 @@ public class ServiceConfigVersionRequest {
   public void setNote(String note) {
     this.note = note;
   }
+
+  public Boolean getIsCurrent() {
+    return isCurrent;
+  }
+
+  public void setIsCurrent(Boolean isCurrent) {
+    this.isCurrent = isCurrent;
+  }
+
+  @Override
+  public String toString() {
+    return "ServiceConfigVersionRequest{" +
+        "clusterName='" + clusterName + '\'' +
+        ", serviceName='" + serviceName + '\'' +
+        ", version=" + version +
+        ", createTime=" + createTime +
+        ", applyTime=" + applyTime +
+        ", userName='" + userName + '\'' +
+        ", note='" + note + '\'' +
+        ", isCurrent=" + isCurrent +
+        '}';
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e6736a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceConfigVersionResponse.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceConfigVersionResponse.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceConfigVersionResponse.java
index 49921bc..7aea65e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceConfigVersionResponse.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceConfigVersionResponse.java
@@ -19,7 +19,6 @@
 package org.apache.ambari.server.controller;
 
 
-import org.apache.ambari.server.state.Config;
 import org.codehaus.jackson.annotate.JsonProperty;
 import org.codehaus.jackson.map.annotate.JsonSerialize;
 
@@ -34,6 +33,7 @@ public class ServiceConfigVersionResponse {
   private String groupName;
   private String userName;
   private String note;
+  private Boolean isCurrent = false;
   private List<ConfigurationResponse> configurations;
   private List<String> hosts;
 
@@ -46,7 +46,7 @@ public class ServiceConfigVersionResponse {
     this.serviceName = serviceName;
   }
 
-  @JsonProperty("serviceconfigversion")
+  @JsonProperty("service_config_version")
   public Long getVersion() {
     return version;
   }
@@ -130,5 +130,14 @@ public class ServiceConfigVersionResponse {
   public void setGroupId(Long groupId) {
     this.groupId = groupId;
   }
+
+  @JsonProperty("is_current")
+  public Boolean getIsCurrent() {
+    return isCurrent;
+  }
+
+  public void setIsCurrent(Boolean isCurrent) {
+    this.isCurrent = isCurrent;
+  }
 }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e6736a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
index 3498ffb..113e7b8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
@@ -64,7 +64,7 @@ public class ClusterResourceProvider extends BaseBlueprintProcessor {
   protected static final String CLUSTER_VERSION_PROPERTY_ID = PropertyHelper.getPropertyId("Clusters", "version");
   protected static final String CLUSTER_PROVISIONING_STATE_PROPERTY_ID = PropertyHelper.getPropertyId("Clusters", "provisioning_state");
   protected static final String CLUSTER_DESIRED_CONFIGS_PROPERTY_ID = PropertyHelper.getPropertyId("Clusters", "desired_configs");
-  protected static final String CLUSTER_DESIRED_SERVICE_CONFIG_VERSIONS_PROPERTY_ID = PropertyHelper.getPropertyId("Clusters", "desired_serviceconfigversions");
+  protected static final String CLUSTER_DESIRED_SERVICE_CONFIG_VERSIONS_PROPERTY_ID = PropertyHelper.getPropertyId("Clusters", "desired_service_config_versions");
   protected static final String CLUSTER_TOTAL_HOSTS_PROPERTY_ID = PropertyHelper.getPropertyId("Clusters", "total_hosts");
   protected static final String CLUSTER_HEALTH_REPORT_PROPERTY_ID = PropertyHelper.getPropertyId("Clusters", "health_report");
   protected static final String BLUEPRINT_PROPERTY_ID = PropertyHelper.getPropertyId(null, "blueprint");
@@ -173,7 +173,8 @@ public class ClusterResourceProvider extends BaseBlueprintProcessor {
       setResourceProperty(resource, CLUSTER_NAME_PROPERTY_ID, clusterName, requestedIds);
       setResourceProperty(resource, CLUSTER_PROVISIONING_STATE_PROPERTY_ID, response.getProvisioningState(), requestedIds);
       setResourceProperty(resource, CLUSTER_DESIRED_CONFIGS_PROPERTY_ID, response.getDesiredConfigs(), requestedIds);
-      setResourceProperty(resource, CLUSTER_DESIRED_SERVICE_CONFIG_VERSIONS_PROPERTY_ID, response.getDesiredServiceConfigVersions(), requestedIds);
+      setResourceProperty(resource, CLUSTER_DESIRED_SERVICE_CONFIG_VERSIONS_PROPERTY_ID,
+        response.getDesiredServiceConfigVersions(), requestedIds);
       setResourceProperty(resource, CLUSTER_TOTAL_HOSTS_PROPERTY_ID, response.getTotalHosts(), requestedIds);
       setResourceProperty(resource, CLUSTER_HEALTH_REPORT_PROPERTY_ID, response.getClusterHealthReport(), requestedIds);
 
@@ -219,24 +220,29 @@ public class ClusterResourceProvider extends BaseBlueprintProcessor {
     for (ClusterRequest clusterRequest : requests) {
       ClusterResponse updateResults = getManagementController().getClusterUpdateResults(clusterRequest);
       if (updateResults != null) {
-        Map<String, ServiceConfigVersionResponse> serviceConfigVersions = updateResults.getDesiredServiceConfigVersions();
+        Map<String, Collection<ServiceConfigVersionResponse>> serviceConfigVersions = updateResults.getDesiredServiceConfigVersions();
         if (serviceConfigVersions != null) {
           associatedResources = new HashSet<Resource>();
-          for (Map.Entry<String, ServiceConfigVersionResponse> stringServiceConfigVersionResponseEntry : serviceConfigVersions.entrySet()) {
-            Resource resource = new ResourceImpl(Resource.Type.ServiceConfigVersion);
-            ServiceConfigVersionResponse serviceConfigVersionResponse = stringServiceConfigVersionResponseEntry.getValue();
-            resource.setProperty(ServiceConfigVersionResourceProvider.SERVICE_CONFIG_VERSION_SERVICE_NAME_PROPERTY_ID,
-              serviceConfigVersionResponse.getServiceName());
-            resource.setProperty(ServiceConfigVersionResourceProvider.SERVICE_CONFIG_VERSION_PROPERTY_ID,
-              serviceConfigVersionResponse.getVersion());
-            resource.setProperty(ServiceConfigVersionResourceProvider.SERVICE_CONFIG_VERSION_NOTE_PROPERTY_ID,
-              serviceConfigVersionResponse.getNote());
-            if (serviceConfigVersionResponse.getConfigurations() != null) {
-              resource.setProperty(
+          for (Collection<ServiceConfigVersionResponse> scvCollection : serviceConfigVersions.values()) {
+            for (ServiceConfigVersionResponse serviceConfigVersionResponse : scvCollection) {
+              Resource resource = new ResourceImpl(Resource.Type.ServiceConfigVersion);
+              resource.setProperty(ServiceConfigVersionResourceProvider.SERVICE_CONFIG_VERSION_SERVICE_NAME_PROPERTY_ID,
+                serviceConfigVersionResponse.getServiceName());
+              resource.setProperty(ServiceConfigVersionResourceProvider.SERVICE_CONFIG_VERSION_PROPERTY_ID,
+                serviceConfigVersionResponse.getVersion());
+              resource.setProperty(ServiceConfigVersionResourceProvider.SERVICE_CONFIG_VERSION_NOTE_PROPERTY_ID,
+                serviceConfigVersionResponse.getNote());
+              resource.setProperty(ServiceConfigVersionResourceProvider.SERVICE_CONFIG_VERSION_GROUP_ID_PROPERTY_ID,
+                  serviceConfigVersionResponse.getGroupId());
+              resource.setProperty(ServiceConfigVersionResourceProvider.SERVICE_CONFIG_VERSION_GROUP_NAME_PROPERTY_ID,
+                  serviceConfigVersionResponse.getGroupName());
+              if (serviceConfigVersionResponse.getConfigurations() != null) {
+                resource.setProperty(
                   ServiceConfigVersionResourceProvider.SERVICE_CONFIG_VERSION_CONFIGURATIONS_PROPERTY_ID,
                   serviceConfigVersionResponse.getConfigurations());
+              }
+              associatedResources.add(resource);
             }
-            associatedResources.add(resource);
           }
 
         }
@@ -354,13 +360,13 @@ public class ClusterResourceProvider extends BaseBlueprintProcessor {
       String absCategory = PropertyHelper.getPropertyCategory(entry.getKey());
       String propName = PropertyHelper.getPropertyName(entry.getKey());
 
-      if (absCategory.startsWith(parentCategory + "/desired_serviceconfigversions")) {
+      if (absCategory.startsWith(parentCategory + "/desired_service_config_version")) {
         serviceConfigVersionRequest =
             (serviceConfigVersionRequest ==null ) ? new ServiceConfigVersionRequest() : serviceConfigVersionRequest;
 
         if (propName.equals("service_name"))
           serviceConfigVersionRequest.setServiceName(entry.getValue().toString());
-        else if (propName.equals("serviceconfigversion"))
+        else if (propName.equals("service_config_version"))
           serviceConfigVersionRequest.setVersion(Long.valueOf(entry.getValue().toString()));
         else if (propName.equals("service_config_version_note")) {
           serviceConfigVersionRequest.setNote(entry.getValue().toString());

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e6736a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
index c803c5a..e1d73e6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
@@ -74,6 +74,8 @@ public class ConfigGroupResourceProvider extends
     .getPropertyId("ConfigGroup", "tag");
   protected static final String CONFIGGROUP_DESC_PROPERTY_ID = PropertyHelper
     .getPropertyId("ConfigGroup", "description");
+  protected static final String CONFIGGROUP_SCV_NOTE_ID = PropertyHelper
+      .getPropertyId("ConfigGroup", "service_config_version_note");
   protected static final String CONFIGGROUP_HOSTNAME_PROPERTY_ID =
     PropertyHelper.getPropertyId(null, "host_name");
   public static final String CONFIGGROUP_HOSTS_PROPERTY_ID = PropertyHelper
@@ -595,6 +597,8 @@ public class ConfigGroupResourceProvider extends
       null,
       null);
 
+    request.setServiceConfigVersionNote((String) properties.get(CONFIGGROUP_SCV_NOTE_ID));
+
     Map<String, Config> configurations = new HashMap<String, Config>();
     Set<String> hosts = new HashSet<String>();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e6736a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java
index 3b7798e..f284030 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java
@@ -26,21 +26,21 @@ import org.apache.ambari.server.controller.ServiceConfigVersionRequest;
 import org.apache.ambari.server.controller.ServiceConfigVersionResponse;
 import org.apache.ambari.server.controller.spi.*;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
-import org.apache.ambari.server.state.Config;
 
 import java.util.*;
 
 public class ServiceConfigVersionResourceProvider extends
     AbstractControllerResourceProvider {
   public static final String SERVICE_CONFIG_VERSION_CLUSTER_NAME_PROPERTY_ID = PropertyHelper.getPropertyId(null, "cluster_name");
-  public static final String SERVICE_CONFIG_VERSION_PROPERTY_ID = PropertyHelper.getPropertyId(null, "serviceconfigversion");
+  public static final String SERVICE_CONFIG_VERSION_PROPERTY_ID = PropertyHelper.getPropertyId(null, "service_config_version");
   public static final String SERVICE_CONFIG_VERSION_SERVICE_NAME_PROPERTY_ID = PropertyHelper.getPropertyId(null, "service_name");
   public static final String SERVICE_CONFIG_VERSION_CREATE_TIME_PROPERTY_ID = PropertyHelper.getPropertyId(null, "createtime");
   public static final String SERVICE_CONFIG_VERSION_USER_PROPERTY_ID = PropertyHelper.getPropertyId(null, "user");
   public static final String SERVICE_CONFIG_VERSION_NOTE_PROPERTY_ID = PropertyHelper.getPropertyId(null, "service_config_version_note");
   public static final String SERVICE_CONFIG_VERSION_GROUP_ID_PROPERTY_ID = PropertyHelper.getPropertyId(null, "group_id");
   public static final String SERVICE_CONFIG_VERSION_GROUP_NAME_PROPERTY_ID = PropertyHelper.getPropertyId(null, "group_name");
-  public static final String SERVICE_CONFIG_VERSION_HOSTNAMES_PROPERTY_ID = PropertyHelper.getPropertyId(null, "hosts");
+  public static final String SERVICE_CONFIG_VERSION_IS_CURRENT_PROPERTY_ID = PropertyHelper.getPropertyId(null, "is_current");
+  public static final String SERVICE_CONFIG_VERSION_HOSTS_PROPERTY_ID = PropertyHelper.getPropertyId(null, "hosts");
   public static final String SERVICE_CONFIG_VERSION_CONFIGURATIONS_PROPERTY_ID = PropertyHelper.getPropertyId(null, "configurations");
 
   /**
@@ -106,7 +106,8 @@ public class ServiceConfigVersionResourceProvider extends
       resource.setProperty(SERVICE_CONFIG_VERSION_NOTE_PROPERTY_ID, response.getNote());
       resource.setProperty(SERVICE_CONFIG_VERSION_GROUP_ID_PROPERTY_ID, response.getGroupId());
       resource.setProperty(SERVICE_CONFIG_VERSION_GROUP_NAME_PROPERTY_ID, response.getGroupName());
-      resource.setProperty(SERVICE_CONFIG_VERSION_HOSTNAMES_PROPERTY_ID, response.getHosts());
+      resource.setProperty(SERVICE_CONFIG_VERSION_HOSTS_PROPERTY_ID, response.getHosts());
+      resource.setProperty(SERVICE_CONFIG_VERSION_IS_CURRENT_PROPERTY_ID, response.getIsCurrent());
 
       resources.add(resource);
     }
@@ -134,12 +135,13 @@ public class ServiceConfigVersionResourceProvider extends
 
     for (String propertyId : propertyIds) {
 
-      if (!propertyId.equals("cluster_name") && !propertyId.equals("serviceconfigversion") &&
+      if (!propertyId.equals("cluster_name") && !propertyId.equals("service_config_version") &&
           !propertyId.equals("service_name") && !propertyId.equals("createtime") &&
           !propertyId.equals("appliedtime") && !propertyId.equals("user") &&
           !propertyId.equals("service_config_version_note") &&
           !propertyId.equals("group_id") &&
           !propertyId.equals("group_name") &&
+          !propertyId.equals("is_current") &&
           !propertyId.equals("hosts")) {
 
         unsupportedProperties.add(propertyId);
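
Collecting the property ids above, one entry of a service_config_versions response would look roughly like this (only the key names come from the provider; all values are invented for illustration):

  {
    "cluster_name" : "c1",
    "service_name" : "MAPREDUCE",
    "service_config_version" : 2,
    "service_config_version_note" : "rolled back to v1",
    "createtime" : 1409088000000,
    "user" : "admin",
    "group_id" : -1,
    "group_name" : "default",
    "is_current" : true,
    "hosts" : [ ],
    "configurations" : [ ]
  }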

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e6736a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
index 9d13eb2..1b9d33d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
@@ -26,7 +26,15 @@ import org.apache.ambari.server.orm.RequiresSession;
 import org.apache.ambari.server.orm.entities.ServiceConfigEntity;
 
 import javax.persistence.EntityManager;
+import javax.persistence.Tuple;
 import javax.persistence.TypedQuery;
+import javax.persistence.criteria.CriteriaBuilder;
+import javax.persistence.criteria.CriteriaQuery;
+import javax.persistence.criteria.Root;
+import javax.persistence.criteria.Subquery;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -67,6 +75,37 @@ public class ServiceConfigDAO {
   }
 
   @RequiresSession
+  public List<ServiceConfigEntity> getLastServiceConfigVersionsForGroups(Collection<Long> configGroupIds) {
+    if (configGroupIds == null || configGroupIds.isEmpty()) {
+      return Collections.emptyList();
+    }
+    CriteriaBuilder cb = entityManagerProvider.get().getCriteriaBuilder();
+    CriteriaQuery<Tuple> cq = cb.createTupleQuery();
+    Root<ServiceConfigEntity> groupVersion = cq.from(ServiceConfigEntity.class);
+
+
+    cq.multiselect(groupVersion.get("groupId").alias("groupId"), cb.max(groupVersion.<Long>get("version")).alias("lastVersion"));
+    cq.where(groupVersion.get("groupId").in(configGroupIds));
+    cq.groupBy(groupVersion.get("groupId"));
+    List<Tuple> tuples = daoUtils.selectList(entityManagerProvider.get().createQuery(cq));
+    List<ServiceConfigEntity> result = new ArrayList<ServiceConfigEntity>();
+    // a subquery here would perform poorly, so no bulk select; the cache should help since the result size is naturally limited
+    for (Tuple tuple : tuples) {
+      CriteriaQuery<ServiceConfigEntity> sce = cb.createQuery(ServiceConfigEntity.class);
+      Root<ServiceConfigEntity> sceRoot = sce.from(ServiceConfigEntity.class);
+
+      sce.where(cb.and(cb.equal(sceRoot.get("groupId"), tuple.get("groupId")),
+        cb.equal(sceRoot.get("version"), tuple.get("lastVersion"))));
+      sce.select(sceRoot);
+      result.add(daoUtils.selectSingle(entityManagerProvider.get().createQuery(sce)));
+    }
+
+    return result;
+  }
+
+
+
+  @RequiresSession
   public List<Long> getServiceConfigVersionsByConfig(Long clusterId, String configType, Long configVersion) {
     TypedQuery<Long> query = entityManagerProvider.get().createQuery("SELECT scv.version " +
         "FROM ServiceConfigEntity scv JOIN scv.clusterConfigEntities cc " +
@@ -80,7 +119,7 @@ public class ServiceConfigDAO {
       createQuery("SELECT scv FROM ServiceConfigEntity scv " +
         "WHERE scv.clusterId = ?1 AND scv.createTimestamp = (" +
         "SELECT MAX(scv2.createTimestamp) FROM ServiceConfigEntity scv2 " +
-        "WHERE scv2.serviceName = scv.serviceName AND scv2.clusterId = ?1)",
+        "WHERE scv2.serviceName = scv.serviceName AND scv2.clusterId = ?1 AND scv2.groupId IS NULL)",
         ServiceConfigEntity.class);
 
     return daoUtils.selectList(query, clusterId);

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e6736a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
index 2c83f1c..77ab0eb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
@@ -186,16 +186,16 @@ public interface Cluster {
    * @param version service config version
    * @param user the user making the change for audit purposes
    * @param note
-   * @return true if service config version applied
+   * @return service config version created
    * @throws AmbariException
    */
-  boolean setServiceConfigVersion(String serviceName, Long version, String user, String note) throws AmbariException;
+  ServiceConfigVersionResponse setServiceConfigVersion(String serviceName, Long version, String user, String note) throws AmbariException;
 
   /**
    * Get currently active service config versions for stack services
    * @return
    */
-  Map<String, ServiceConfigVersionResponse> getActiveServiceConfigVersions();
+  Map<String, Collection<ServiceConfigVersionResponse>> getActiveServiceConfigVersions();
 
   /**
    * Get service config version history

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e6736a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 8e073f1..c3b6a86 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -1505,7 +1505,7 @@ public class ClusterImpl implements Cluster {
   }
 
   @Override
-  public boolean setServiceConfigVersion(String serviceName, Long version, String user, String note) throws AmbariException {
+  public ServiceConfigVersionResponse setServiceConfigVersion(String serviceName, Long version, String user, String note) throws AmbariException {
     if (null == user)
       throw new NullPointerException("User must be specified.");
 
@@ -1513,9 +1513,9 @@ public class ClusterImpl implements Cluster {
     try {
       readWriteLock.writeLock().lock();
       try {
-        applyServiceConfigVersion(serviceName, version, user, note);
+        ServiceConfigVersionResponse serviceConfigVersionResponse = applyServiceConfigVersion(serviceName, version, user, note);
         configHelper.invalidateStaleConfigsCache();
-        return true;
+        return serviceConfigVersionResponse;
       } finally {
         readWriteLock.writeLock().unlock();
       }
@@ -1525,18 +1525,21 @@ public class ClusterImpl implements Cluster {
   }
 
   @Override
-  public Map<String, ServiceConfigVersionResponse> getActiveServiceConfigVersions() {
+  public Map<String, Collection<ServiceConfigVersionResponse>> getActiveServiceConfigVersions() {
     clusterGlobalLock.readLock().lock();
     try {
       readWriteLock.readLock().lock();
       try {
-        Map<String, ServiceConfigVersionResponse> result = new HashMap<String, ServiceConfigVersionResponse>();
+        Map<String, Collection<ServiceConfigVersionResponse>> map = new HashMap<String, Collection<ServiceConfigVersionResponse>>();
 
         Set<ServiceConfigVersionResponse> responses = getActiveServiceConfigVersionSet();
         for (ServiceConfigVersionResponse response : responses) {
-          result.put(response.getServiceName(), response);
+          if (map.get(response.getServiceName()) == null) {
+            map.put(response.getServiceName(), new ArrayList<ServiceConfigVersionResponse>());
+          }
+          map.get(response.getServiceName()).add(response);
         }
-        return result;
+        return map;
       } finally {
         readWriteLock.readLock().unlock();
       }
@@ -1553,6 +1556,8 @@ public class ClusterImpl implements Cluster {
       readWriteLock.readLock().lock();
       try {
         List<ServiceConfigVersionResponse> serviceConfigVersionResponses = new ArrayList<ServiceConfigVersionResponse>();
+        Set<Long> activeIds = getActiveServiceConfigVersionIds();
+
         for (ServiceConfigEntity serviceConfigEntity : serviceConfigDAO.getServiceConfigs(getClusterId())) {
           ServiceConfigVersionResponse serviceConfigVersionResponse = new ServiceConfigVersionResponse();
 
@@ -1564,6 +1569,7 @@ public class ClusterImpl implements Cluster {
           serviceConfigVersionResponse.setNote(serviceConfigEntity.getNote());
           serviceConfigVersionResponse.setHosts(serviceConfigEntity.getHostNames());
           serviceConfigVersionResponse.setConfigurations(new ArrayList<ConfigurationResponse>());
+          serviceConfigVersionResponse.setIsCurrent(activeIds.contains(serviceConfigEntity.getServiceConfigId()));
 
           List<ClusterConfigEntity> clusterConfigEntities = serviceConfigEntity.getClusterConfigEntities();
           for (ClusterConfigEntity clusterConfigEntity : clusterConfigEntities) {
@@ -1596,16 +1602,40 @@ public class ClusterImpl implements Cluster {
     }
   }
 
-  @RequiresSession
-  Set<ServiceConfigVersionResponse> getActiveServiceConfigVersionSet() {
+  private Set<ServiceConfigVersionResponse> getActiveServiceConfigVersionSet() {
     Set<ServiceConfigVersionResponse> responses = new HashSet<ServiceConfigVersionResponse>();
-    List<ServiceConfigEntity> lastServiceConfigs = serviceConfigDAO.getLastServiceConfigs(getClusterId());
-    for (ServiceConfigEntity lastServiceConfig : lastServiceConfigs) {
-      responses.add(convertToServiceConfigVersionResponse(lastServiceConfig));
+    List<ServiceConfigEntity> activeServiceConfigVersions = getActiveServiceConfigVersionEntities();
+
+    for (ServiceConfigEntity lastServiceConfig : activeServiceConfigVersions) {
+      ServiceConfigVersionResponse response = convertToServiceConfigVersionResponse(lastServiceConfig);
+      response.setIsCurrent(true); //mark these as current, as they are
+      responses.add(response);
     }
     return responses;
   }
 
+  private Set<Long> getActiveServiceConfigVersionIds() {
+    Set<Long> idSet = new HashSet<Long>();
+    for (ServiceConfigEntity entity : getActiveServiceConfigVersionEntities()) {
+      idSet.add(entity.getServiceConfigId());
+    }
+    return idSet;
+  }
+
+  private List<ServiceConfigEntity> getActiveServiceConfigVersionEntities() {
+
+    List<ServiceConfigEntity> activeServiceConfigVersions = new ArrayList<ServiceConfigEntity>();
+    //for services
+    activeServiceConfigVersions.addAll(serviceConfigDAO.getLastServiceConfigs(getClusterId()));
+    //for config groups
+    if (clusterConfigGroups != null) {
+      activeServiceConfigVersions.addAll(
+        serviceConfigDAO.getLastServiceConfigVersionsForGroups(clusterConfigGroups.keySet()));
+    }
+
+    return activeServiceConfigVersions;
+  }
+
   @RequiresSession
   ServiceConfigVersionResponse getActiveServiceConfigVersion(String serviceName) {
     ServiceConfigEntity lastServiceConfig = serviceConfigDAO.getLastServiceConfig(getClusterId(), serviceName);
@@ -1626,11 +1656,20 @@ public class ClusterImpl implements Cluster {
     serviceConfigVersionResponse.setCreateTime(serviceConfigEntity.getCreateTimestamp());    
     serviceConfigVersionResponse.setUserName(serviceConfigEntity.getUser());
     serviceConfigVersionResponse.setNote(serviceConfigEntity.getNote());
+    if (clusterConfigGroups != null) {
+      ConfigGroup configGroup = clusterConfigGroups.get(serviceConfigEntity.getGroupId());
+      if (configGroup != null) {
+        serviceConfigVersionResponse.setGroupId(configGroup.getId());
+        serviceConfigVersionResponse.setGroupName(configGroup.getName());
+      }
+    }
+
+
     return serviceConfigVersionResponse;
   }
 
   @Transactional
-  void applyServiceConfigVersion(String serviceName, Long serviceConfigVersion, String user,
+  ServiceConfigVersionResponse applyServiceConfigVersion(String serviceName, Long serviceConfigVersion, String user,
                                  String serviceConfigVersionNote) throws AmbariException {
     ServiceConfigEntity serviceConfigEntity = serviceConfigDAO.findByServiceAndVersion(serviceName, serviceConfigVersion);
     if (serviceConfigEntity == null) {
@@ -1690,6 +1729,8 @@ public class ClusterImpl implements Cluster {
     serviceConfigEntityClone.setVersion(configVersionHelper.getNextVersion(serviceName));
 
     serviceConfigDAO.create(serviceConfigEntityClone);
+
+    return convertToServiceConfigVersionResponse(serviceConfigEntityClone);
   }
 
   @Transactional

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e6736a/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/properties.json b/ambari-server/src/main/resources/properties.json
index 5777935..785bd24 100644
--- a/ambari-server/src/main/resources/properties.json
+++ b/ambari-server/src/main/resources/properties.json
@@ -6,7 +6,7 @@
         "Clusters/version",
         "Clusters/state",
         "Clusters/desired_configs",
-        "Clusters/desired_serviceconfigversions",
+        "Clusters/desired_service_config_versions",
         "Clusters/total_hosts",
         "Clusters/health_report",
         "_"
@@ -91,7 +91,8 @@
         "ServiceConfigVersion/group_id",
         "ServiceConfigVersion/group_name",
         "ServiceConfigVersion/hosts",
-        "ServiceConfigVersion/serviceconfigversion",
+        "ServiceConfigVersion/service_config_version",
+        "ServiceConfigVersion/is_current",
         "ServiceConfigVersion/createtime",
         "ServiceConfigVersion/user",
         "ServiceConfigVersion/service_config_version_note"

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8e6736a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
index 201702b..a282e92 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
@@ -38,6 +38,7 @@ import java.util.Set;
 
 import javax.persistence.EntityManager;
 
+import com.google.common.collect.Multimap;
 import junit.framework.Assert;
 
 import org.apache.ambari.server.AmbariException;
@@ -652,11 +653,11 @@ public class ClusterTest {
       c1.getServiceConfigVersions();
     Assert.assertNotNull(serviceConfigVersions);
     Assert.assertEquals(1, serviceConfigVersions.size());
-    Map<String, ServiceConfigVersionResponse> activeServiceConfigVersions =
+    Map<String, Collection<ServiceConfigVersionResponse>> activeServiceConfigVersions =
       c1.getActiveServiceConfigVersions();
     Assert.assertEquals(1, activeServiceConfigVersions.size());
     ServiceConfigVersionResponse mapredResponse =
-      activeServiceConfigVersions.get("MAPREDUCE");
+      activeServiceConfigVersions.get("MAPREDUCE").iterator().next();
 
     Assert.assertEquals("MAPREDUCE", mapredResponse.getServiceName());
     Assert.assertEquals("c1", mapredResponse.getClusterName());
@@ -670,7 +671,7 @@ public class ClusterTest {
     Assert.assertEquals(2, serviceConfigVersions.size());
     // active version still 1
     Assert.assertEquals(1, activeServiceConfigVersions.size());
-    mapredResponse = activeServiceConfigVersions.get("MAPREDUCE");
+    mapredResponse = activeServiceConfigVersions.get("MAPREDUCE").iterator().next();
     Assert.assertEquals("MAPREDUCE", mapredResponse.getServiceName());
     Assert.assertEquals("c1", mapredResponse.getClusterName());
     Assert.assertEquals("admin", mapredResponse.getUserName());
@@ -683,7 +684,7 @@ public class ClusterTest {
     Assert.assertEquals(3, serviceConfigVersions.size());
     // active version still 1
     Assert.assertEquals(1, activeServiceConfigVersions.size());
-    mapredResponse = activeServiceConfigVersions.get("MAPREDUCE");
+    mapredResponse = activeServiceConfigVersions.get("MAPREDUCE").iterator().next();
     Assert.assertEquals("MAPREDUCE", mapredResponse.getServiceName());
     Assert.assertEquals("c1", mapredResponse.getClusterName());
     Assert.assertEquals("admin", mapredResponse.getUserName());
@@ -717,10 +718,10 @@ public class ClusterTest {
     Assert.assertEquals("version1", c1.getDesiredConfigByType("hdfs-site").getTag());
     Assert.assertEquals("version2", c1.getDesiredConfigByType("core-site").getTag());
 
-    Map<String, ServiceConfigVersionResponse> activeServiceConfigVersions =
+    Map<String, Collection<ServiceConfigVersionResponse>> activeServiceConfigVersions =
       c1.getActiveServiceConfigVersions();
     Assert.assertEquals(1, activeServiceConfigVersions.size());
 
 
   }
-}
\ No newline at end of file
+}


[12/35] git commit: AMBARI-7013. Fix UI Unit tests. (onechiporenko)

Posted by jo...@apache.org.
AMBARI-7013. Fix UI Unit tests. (onechiporenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/574461df
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/574461df
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/574461df

Branch: refs/heads/branch-alerts-dev
Commit: 574461df0847b83967ec64c429387fb4859eecd0
Parents: 9094078
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Tue Aug 26 16:25:07 2014 +0300
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Tue Aug 26 16:25:07 2014 +0300

----------------------------------------------------------------------
 ambari-web/test/app_test.js                     | 52 --------------------
 .../dashboard/config_history_controller_test.js | 25 +++++++---
 ambari-web/test/utils/date_test.js              |  4 +-
 .../common/configs/config_history_flow_test.js  |  6 +--
 4 files changed, 23 insertions(+), 64 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/574461df/ambari-web/test/app_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/app_test.js b/ambari-web/test/app_test.js
index c16d5f0..85b50ee 100644
--- a/ambari-web/test/app_test.js
+++ b/ambari-web/test/app_test.js
@@ -81,58 +81,6 @@ describe('App', function () {
     });
   });
 
-  describe('#stack2VersionURL', function () {
-
-    var testCases = [
-      {
-        title: 'if currentStackVersion and defaultStackVersion are empty then stack2VersionURL should contain prefix',
-        currentStackVersion: '',
-        defaultStackVersion: '',
-        result: '/stacks2/HDP/versions/'
-      },
-      {
-        title: 'if currentStackVersion is "HDP-1.3.1" then stack2VersionURL should be "/stacks2/HDP/versions/1.3.1"',
-        currentStackVersion: 'HDP-1.3.1',
-        defaultStackVersion: '',
-        result: '/stacks2/HDP/versions/1.3.1'
-      },
-      {
-        title: 'if defaultStackVersion is "HDP-1.3.1" then stack2VersionURL should be "/stacks/HDP/versions/1.3.1"',
-        currentStackVersion: '',
-        defaultStackVersion: 'HDP-1.3.1',
-        result: '/stacks2/HDP/versions/1.3.1'
-      },
-      {
-        title: 'if defaultStackVersion and currentStackVersion are different then stack2VersionURL should have currentStackVersion value',
-        currentStackVersion: 'HDP-1.3.2',
-        defaultStackVersion: 'HDP-1.3.1',
-        result: '/stacks2/HDP/versions/1.3.2'
-      },
-      {
-        title: 'if defaultStackVersion is "HDPLocal-1.3.1" then stack2VersionURL should be "/stacks2/HDPLocal/versions/1.3.1"',
-        currentStackVersion: '',
-        defaultStackVersion: 'HDPLocal-1.3.1',
-        result: '/stacks2/HDPLocal/versions/1.3.1'
-      },
-      {
-        title: 'if currentStackVersion is "HDPLocal-1.3.1" then stack2VersionURL should be "/stacks2/HDPLocal/versions/1.3.1"',
-        currentStackVersion: 'HDPLocal-1.3.1',
-        defaultStackVersion: '',
-        result: '/stacks2/HDPLocal/versions/1.3.1'
-      }
-    ];
-
-    testCases.forEach(function (test) {
-      it(test.title, function () {
-        App.set('currentStackVersion', test.currentStackVersion);
-        App.set('defaultStackVersion', test.defaultStackVersion);
-        expect(App.get('stack2VersionURL')).to.equal(test.result);
-        App.set('currentStackVersion', "HDP-1.2.2");
-        App.set('defaultStackVersion', "HDP-1.2.2");
-      });
-    });
-  });
-
   describe('#falconServerURL', function () {
 
     var testCases = [

http://git-wip-us.apache.org/repos/asf/ambari/blob/574461df/ambari-web/test/controllers/main/dashboard/config_history_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/dashboard/config_history_controller_test.js b/ambari-web/test/controllers/main/dashboard/config_history_controller_test.js
index 257e012..0692b9e 100644
--- a/ambari-web/test/controllers/main/dashboard/config_history_controller_test.js
+++ b/ambari-web/test/controllers/main/dashboard/config_history_controller_test.js
@@ -28,11 +28,11 @@ describe('MainConfigHistoryController', function () {
   describe('#realUrl', function () {
     it('cluster name is empty', function () {
       App.set('clusterName', '');
-      expect(controller.get('realUrl')).to.equal('/api/v1/clusters//configurations/serviceconfigversions?<parameters>fields=serviceconfigversion,user,appliedtime,createtime,service_name,service_config_version_note&minimal_response=true');
+      expect(controller.get('realUrl')).to.equal('/api/v1/clusters//configurations/serviceconfigversions?<parameters>fields=serviceconfigversion,user,group_id,group_name,createtime,service_name,service_config_version_note&minimal_response=true');
     });
     it('cluster name is "mycluster"', function () {
       App.set('clusterName', 'mycluster');
-      expect(controller.get('realUrl')).to.equal('/api/v1/clusters/mycluster/configurations/serviceconfigversions?<parameters>fields=serviceconfigversion,user,appliedtime,createtime,service_name,service_config_version_note&minimal_response=true');
+      expect(controller.get('realUrl')).to.equal('/api/v1/clusters/mycluster/configurations/serviceconfigversions?<parameters>fields=serviceconfigversion,user,group_id,group_name,createtime,service_name,service_config_version_note&minimal_response=true');
     });
   });
   describe('#load()', function () {
@@ -122,20 +122,31 @@ describe('MainConfigHistoryController', function () {
     });
     afterEach(function () {
       App.router.get.restore();
+      App.get.restore();
     });
     it('testMode is true', function () {
-      App.set('testMode', true);
+      sinon.stub(App, 'get', function(k) {
+        if ('testMode' === k) return true;
+        return Em.get(App, k);
+      });
       expect(controller.getUrl()).to.equal('/data/configurations/service_versions.json');
     });
     it('query params is empty', function () {
-      App.set('testMode', false);
-      expect(controller.getUrl()).to.equal('/api/v1/clusters/mycluster/configurations/serviceconfigversions?fields=serviceconfigversion,user,appliedtime,createtime,service_name,service_config_version_note&minimal_response=true');
+      sinon.stub(App, 'get', function(k) {
+        if ('testMode' === k) return false;
+        return Em.get(App, k);
+      });
+      expect(controller.getUrl()).to.equal('/api/v1/clusters/mycluster/configurations/serviceconfigversions?fields=serviceconfigversion,user,group_id,group_name,createtime,service_name,service_config_version_note&minimal_response=true');
     });
     it('query params is correct', function () {
-      App.set('testMode', false);
-      expect(controller.getUrl({})).to.equal('/api/v1/clusters/mycluster/configurations/serviceconfigversions?paramsfields=serviceconfigversion,user,appliedtime,createtime,service_name,service_config_version_note&minimal_response=true');
+      sinon.stub(App, 'get', function(k) {
+        if ('testMode' === k) return false;
+        return Em.get(App, k);
+      });
+      expect(controller.getUrl({})).to.equal('/api/v1/clusters/mycluster/configurations/serviceconfigversions?paramsfields=serviceconfigversion,user,group_id,group_name,createtime,service_name,service_config_version_note&minimal_response=true');
     });
   });
+
   describe('#doPolling()', function () {
     beforeEach(function () {
       sinon.stub(controller, 'load', function(){

http://git-wip-us.apache.org/repos/asf/ambari/blob/574461df/ambari-web/test/utils/date_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/utils/date_test.js b/ambari-web/test/utils/date_test.js
index 5644894..97c7048 100644
--- a/ambari-web/test/utils/date_test.js
+++ b/ambari-web/test/utils/date_test.js
@@ -89,7 +89,7 @@ describe('date', function () {
     it('Today timestamp', function() {
       var now = new Date();
       var then = new Date(now.getFullYear(),now.getUTCMonth(),now.getUTCDate(),0,0,0);
-      expect(date.dateFormatShort(then.getTime() + 10*3600*1000)).to.equal('Today 10:00:00');
+      expect(date.dateFormatShort(then.getTime() + 10*3600*1000)).to.contain('Today 10:00:00');
     });
     describe('Incorrect timestamps', function() {
       incorrect_tests.forEach(function(test) {
@@ -109,7 +109,7 @@ describe('date', function () {
       { t: today, e: ''}
     ];
     tests.forEach(function(test) {
-      var testMessage = 'should conver {0} to {1}'.format(test.t, test.e);
+      var testMessage = 'should convert {0} to {1}'.format(test.t, test.e);
       it(testMessage, function() {
         expect(date.startTime(test.t)).to.be.eql(test.e);
       });

http://git-wip-us.apache.org/repos/asf/ambari/blob/574461df/ambari-web/test/views/common/configs/config_history_flow_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/common/configs/config_history_flow_test.js b/ambari-web/test/views/common/configs/config_history_flow_test.js
index ba24dab..3317550 100644
--- a/ambari-web/test/views/common/configs/config_history_flow_test.js
+++ b/ambari-web/test/views/common/configs/config_history_flow_test.js
@@ -508,11 +508,11 @@ describe('App.ConfigHistoryFlowView', function () {
   });
 
   describe('#compare()', function () {
-    it('', function () {
+    it('should set compareServiceVersion', function () {
       sinon.spy(view.get('controller'), 'onConfigGroupChange');
-      view.compare({context: {version: 1}});
+      view.compare({context: Em.Object.create({version: 1})});
 
-      expect(view.get('controller.compareServiceVersion')).to.eql({version: 1});
+      expect(view.get('controller.compareServiceVersion.version')).to.equal(1);
       expect(view.get('controller').onConfigGroupChange.calledOnce).to.be.true;
       view.get('controller').onConfigGroupChange.restore();
     });


[05/35] git commit: AMBARI-7006. Provide stack_advisor python script for HDP 1.3.x stack versions

Posted by jo...@apache.org.
AMBARI-7006. Provide stack_advisor python script for HDP 1.3.x stack versions


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fb1e0ca5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fb1e0ca5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fb1e0ca5

Branch: refs/heads/branch-alerts-dev
Commit: fb1e0ca5260d35938df2c10cd75ee5a594873c30
Parents: 46a91db
Author: Srimanth Gunturi <sg...@hortonworks.com>
Authored: Mon Aug 25 14:58:14 2014 -0700
Committer: Srimanth Gunturi <sg...@hortonworks.com>
Committed: Mon Aug 25 16:57:54 2014 -0700

----------------------------------------------------------------------
 .../server/api/services/AmbariMetaInfo.java     |  18 +
 .../stackadvisor/StackAdvisorHelper.java        |  14 +-
 ...GetComponentLayoutRecommnedationCommand.java |   5 +-
 .../GetComponentLayoutValidationCommand.java    |   5 +-
 .../GetConfigurationRecommnedationCommand.java  |  13 +-
 .../GetConfigurationValidationCommand.java      |   5 +-
 .../commands/StackAdvisorCommand.java           |  20 +-
 .../src/main/resources/scripts/stack_advisor.py |   5 +-
 .../stacks/HDP/1.3.2/services/stack_advisor.py  | 519 +++++++++++++++++++
 .../stacks/HDP/1.3.3/services/stack_advisor.py  |  25 +
 .../stacks/HDP/1.3/services/stack_advisor.py    |  25 +
 .../server/api/services/AmbariMetaInfoTest.java |   9 +
 .../stackadvisor/StackAdvisorHelperTest.java    |  22 +-
 .../commands/StackAdvisorCommandTest.java       |  80 ++-
 14 files changed, 728 insertions(+), 37 deletions(-)
----------------------------------------------------------------------
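
The new per-stack scripts added below lean on plain class inheritance: each HDP version's advisor subclasses the advisor of its parent stack, so the 1.3.3 and 1.3 stacks pick up the 1.3.2 behaviour unless they override it. The following is a minimal, self-contained sketch of that chaining; the StackAdvisor base class and the recommendComponentLayout body shown here are simplified stand-ins for illustration, not the actual framework code.

    class StackAdvisor(object):
        # Simplified stand-in for the real base class shipped with Ambari.
        def recommendComponentLayout(self, services, hosts):
            raise NotImplementedError

    class HDP132StackAdvisor(StackAdvisor):
        # 1.3.2 carries the actual logic (see the new 519-line script below).
        def recommendComponentLayout(self, services, hosts):
            return {"recommendations": {"blueprint": {"host_groups": []}}}

    class HDP133StackAdvisor(HDP132StackAdvisor):
        pass  # inherits everything from 1.3.2

    class HDP13StackAdvisor(HDP133StackAdvisor):
        pass  # inherits everything from 1.3.3 (and therefore 1.3.2)

    # The most derived class still answers with the 1.3.2 implementation.
    print(HDP13StackAdvisor().recommendComponentLayout({}, {}))
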


http://git-wip-us.apache.org/repos/asf/ambari/blob/fb1e0ca5/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
index 80af575..56b866d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
@@ -645,6 +645,24 @@ public class AmbariMetaInfo {
     return stackInfoResult;
   }
 
+  public List<String> getStackParentVersions(String stackName, String version) {
+    List<String> parents = new ArrayList<String>();
+    try {
+      StackInfo stackInfo = getStackInfo(stackName, version);
+      if (stackInfo != null) {
+        String parentVersion = stackInfo.getParentStackVersion();
+        if (parentVersion != null) {
+          parents.add(parentVersion);
+          parents.addAll(getStackParentVersions(stackName, parentVersion));
+        }
+      }
+    } catch (AmbariException e) {
+      // parent was not found. just returning empty list
+    } finally {
+      return parents;
+    }
+  }
+
   public Set<PropertyInfo> getProperties(String stackName, String version, String serviceName)
       throws AmbariException {
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/fb1e0ca5/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelper.java
index 6903c1d..60cdd52 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelper.java
@@ -21,6 +21,7 @@ package org.apache.ambari.server.api.services.stackadvisor;
 import java.io.File;
 import java.io.IOException;
 
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest.StackAdvisorRequestType;
 import org.apache.ambari.server.api.services.stackadvisor.commands.GetComponentLayoutRecommnedationCommand;
 import org.apache.ambari.server.api.services.stackadvisor.commands.GetComponentLayoutValidationCommand;
@@ -39,16 +40,19 @@ public class StackAdvisorHelper {
 
   private File recommendationsDir;
   private String stackAdvisorScript;
+  private final AmbariMetaInfo metaInfo;
 
   /* Monotonically increasing requestid */
   private int requestId = 0;
   private StackAdvisorRunner saRunner;
 
   @Inject
-  public StackAdvisorHelper(Configuration conf, StackAdvisorRunner saRunner) throws IOException {
+  public StackAdvisorHelper(Configuration conf, StackAdvisorRunner saRunner,
+                            AmbariMetaInfo metaInfo) throws IOException {
     this.recommendationsDir = conf.getRecommendationsDir();
     this.stackAdvisorScript = conf.getStackAdvisorScript();
     this.saRunner = saRunner;
+    this.metaInfo = metaInfo;
   }
 
   /**
@@ -74,10 +78,10 @@ public class StackAdvisorHelper {
     StackAdvisorCommand<ValidationResponse> command;
     if (requestType == StackAdvisorRequestType.HOST_GROUPS) {
       command = new GetComponentLayoutValidationCommand(recommendationsDir, stackAdvisorScript,
-          requestId, saRunner);
+          requestId, saRunner, metaInfo);
     } else if (requestType == StackAdvisorRequestType.CONFIGURATIONS) {
       command = new GetConfigurationValidationCommand(recommendationsDir, stackAdvisorScript,
-          requestId, saRunner);
+          requestId, saRunner, metaInfo);
     } else {
       throw new StackAdvisorException(String.format("Unsupported request type, type=%s",
           requestType));
@@ -109,10 +113,10 @@ public class StackAdvisorHelper {
     StackAdvisorCommand<RecommendationResponse> command;
     if (requestType == StackAdvisorRequestType.HOST_GROUPS) {
       command = new GetComponentLayoutRecommnedationCommand(recommendationsDir, stackAdvisorScript,
-          requestId, saRunner);
+          requestId, saRunner, metaInfo);
     } else if (requestType == StackAdvisorRequestType.CONFIGURATIONS) {
       command = new GetConfigurationRecommnedationCommand(recommendationsDir, stackAdvisorScript,
-          requestId, saRunner);
+          requestId, saRunner, metaInfo);
     } else {
       throw new StackAdvisorException(String.format("Unsupported request type, type=%s",
           requestType));

http://git-wip-us.apache.org/repos/asf/ambari/blob/fb1e0ca5/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetComponentLayoutRecommnedationCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetComponentLayoutRecommnedationCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetComponentLayoutRecommnedationCommand.java
index 6e7533a..b91f912 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetComponentLayoutRecommnedationCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetComponentLayoutRecommnedationCommand.java
@@ -20,6 +20,7 @@ package org.apache.ambari.server.api.services.stackadvisor.commands;
 
 import java.io.File;
 
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorException;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRunner;
@@ -33,8 +34,8 @@ public class GetComponentLayoutRecommnedationCommand extends
     StackAdvisorCommand<RecommendationResponse> {
 
   public GetComponentLayoutRecommnedationCommand(File recommendationsDir,
-      String stackAdvisorScript, int requestId, StackAdvisorRunner saRunner) {
-    super(recommendationsDir, stackAdvisorScript, requestId, saRunner);
+      String stackAdvisorScript, int requestId, StackAdvisorRunner saRunner, AmbariMetaInfo metaInfo) {
+    super(recommendationsDir, stackAdvisorScript, requestId, saRunner, metaInfo);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/ambari/blob/fb1e0ca5/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetComponentLayoutValidationCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetComponentLayoutValidationCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetComponentLayoutValidationCommand.java
index a5453f6..1a1fc98 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetComponentLayoutValidationCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetComponentLayoutValidationCommand.java
@@ -20,6 +20,7 @@ package org.apache.ambari.server.api.services.stackadvisor.commands;
 
 import java.io.File;
 
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorException;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRunner;
@@ -31,8 +32,8 @@ import org.apache.ambari.server.api.services.stackadvisor.validations.Validation
 public class GetComponentLayoutValidationCommand extends StackAdvisorCommand<ValidationResponse> {
 
   public GetComponentLayoutValidationCommand(File recommendationsDir, String stackAdvisorScript,
-      int requestId, StackAdvisorRunner saRunner) {
-    super(recommendationsDir, stackAdvisorScript, requestId, saRunner);
+      int requestId, StackAdvisorRunner saRunner, AmbariMetaInfo metaInfo) {
+    super(recommendationsDir, stackAdvisorScript, requestId, saRunner, metaInfo);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/ambari/blob/fb1e0ca5/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetConfigurationRecommnedationCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetConfigurationRecommnedationCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetConfigurationRecommnedationCommand.java
index e65f97f..52df3d0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetConfigurationRecommnedationCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetConfigurationRecommnedationCommand.java
@@ -18,6 +18,7 @@
 
 package org.apache.ambari.server.api.services.stackadvisor.commands;
 
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorException;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRunner;
@@ -37,9 +38,9 @@ import java.util.Set;
 public class GetConfigurationRecommnedationCommand extends
     StackAdvisorCommand<RecommendationResponse> {
 
-  public GetConfigurationRecommnedationCommand(File recommendationsDir,
-                                               String stackAdvisorScript, int requestId, StackAdvisorRunner saRunner) {
-    super(recommendationsDir, stackAdvisorScript, requestId, saRunner);
+  public GetConfigurationRecommnedationCommand(File recommendationsDir, String stackAdvisorScript, int requestId,
+                                               StackAdvisorRunner saRunner, AmbariMetaInfo metaInfo) {
+    super(recommendationsDir, stackAdvisorScript, requestId, saRunner, metaInfo);
   }
 
   @Override
@@ -56,12 +57,6 @@ public class GetConfigurationRecommnedationCommand extends
   }
 
   @Override
-  protected StackAdvisorData adjust(StackAdvisorData data, StackAdvisorRequest request) {
-    // do nothing
-    return data;
-  }
-
-  @Override
   protected RecommendationResponse updateResponse(StackAdvisorRequest request, RecommendationResponse response) {
     response.getRecommendations().getBlueprint().setHostGroups(processHostGroups(request));
     response.getRecommendations().getBlueprintClusterBinding().setHostGroups(processHostGroupBindings(request));

http://git-wip-us.apache.org/repos/asf/ambari/blob/fb1e0ca5/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetConfigurationValidationCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetConfigurationValidationCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetConfigurationValidationCommand.java
index 2cd6481..36fe6cb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetConfigurationValidationCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetConfigurationValidationCommand.java
@@ -20,6 +20,7 @@ package org.apache.ambari.server.api.services.stackadvisor.commands;
 
 import java.io.File;
 
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorException;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRunner;
@@ -31,8 +32,8 @@ import org.apache.ambari.server.api.services.stackadvisor.validations.Validation
 public class GetConfigurationValidationCommand extends StackAdvisorCommand<ValidationResponse> {
 
   public GetConfigurationValidationCommand(File recommendationsDir, String stackAdvisorScript,
-      int requestId, StackAdvisorRunner saRunner) {
-    super(recommendationsDir, stackAdvisorScript, requestId, saRunner);
+      int requestId, StackAdvisorRunner saRunner, AmbariMetaInfo metaInfo) {
+    super(recommendationsDir, stackAdvisorScript, requestId, saRunner, metaInfo);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/ambari/blob/fb1e0ca5/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
index 607e337..81d4605 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
@@ -35,6 +35,7 @@ import javax.ws.rs.core.Response;
 import javax.ws.rs.core.Response.Status;
 
 import org.apache.ambari.server.api.resources.ResourceInstance;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.api.services.BaseService;
 import org.apache.ambari.server.api.services.LocalUriInfo;
 import org.apache.ambari.server.api.services.Request;
@@ -52,6 +53,7 @@ import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.map.SerializationConfig;
 import org.codehaus.jackson.node.ArrayNode;
 import org.codehaus.jackson.node.ObjectNode;
+import org.codehaus.jackson.node.TextNode;
 
 /**
  * Parent for all commands.
@@ -89,9 +91,11 @@ public abstract class StackAdvisorCommand<T> extends BaseService {
 
   protected ObjectMapper mapper;
 
+  private final AmbariMetaInfo metaInfo;
+
   @SuppressWarnings("unchecked")
   public StackAdvisorCommand(File recommendationsDir, String stackAdvisorScript, int requestId,
-      StackAdvisorRunner saRunner) {
+      StackAdvisorRunner saRunner, AmbariMetaInfo metaInfo) {
     this.type = (Class<T>) ((ParameterizedType) getClass().getGenericSuperclass())
         .getActualTypeArguments()[0];
 
@@ -102,6 +106,7 @@ public abstract class StackAdvisorCommand<T> extends BaseService {
     this.stackAdvisorScript = stackAdvisorScript;
     this.requestId = requestId;
     this.saRunner = saRunner;
+    this.metaInfo = metaInfo;
   }
 
   protected abstract StackAdvisorCommandType getCommandType();
@@ -133,6 +138,7 @@ public abstract class StackAdvisorCommand<T> extends BaseService {
     try {
       ObjectNode root = (ObjectNode) this.mapper.readTree(data.servicesJSON);
 
+      populateStackHierarchy(root);
       populateComponentHostsMap(root, request.getComponentHostsMap());
       populateConfigurations(root, request.getConfigurations());
 
@@ -166,6 +172,18 @@ public abstract class StackAdvisorCommand<T> extends BaseService {
     }
   }
 
+  protected void populateStackHierarchy(ObjectNode root) {
+    ObjectNode version = (ObjectNode) root.get("Versions");
+    TextNode stackName = (TextNode) version.get("stack_name");
+    TextNode stackVersion = (TextNode) version.get("stack_version");
+    ObjectNode stackHierarchy = version.putObject("stack_hierarchy");
+    stackHierarchy.put("stack_name", stackName);
+    ArrayNode parents = stackHierarchy.putArray("stack_versions");
+    for (String parentVersion : metaInfo.getStackParentVersions(stackName.asText(), stackVersion.asText())) {
+      parents.add(parentVersion);
+    }
+  }
+
   private void populateComponentHostsMap(ObjectNode root, Map<String, Set<String>> componentHostsMap) {
     ArrayNode services = (ArrayNode) root.get(SERVICES_PROPETRY);
     Iterator<JsonNode> servicesIter = services.getElements();

http://git-wip-us.apache.org/repos/asf/ambari/blob/fb1e0ca5/ambari-server/src/main/resources/scripts/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/scripts/stack_advisor.py b/ambari-server/src/main/resources/scripts/stack_advisor.py
index 05d3a20..61d1e09 100755
--- a/ambari-server/src/main/resources/scripts/stack_advisor.py
+++ b/ambari-server/src/main/resources/scripts/stack_advisor.py
@@ -82,9 +82,8 @@ def main(argv=None):
   stackName = services["Versions"]["stack_name"]
   stackVersion = services["Versions"]["stack_version"]
   parentVersions = []
-  if "parent_stack_version" in services["Versions"] and \
-      services["Versions"]["parent_stack_version"] is not None:
-    parentVersions = [ services["Versions"]["parent_stack_version"] ]
+  if "stack_hierarchy" in services["Versions"]:
+    parentVersions = services["Versions"]["stack_hierarchy"]["stack_versions"]
 
   stackAdvisor = instantiateStackAdvisor(stackName, stackVersion, parentVersions)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/fb1e0ca5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/stack_advisor.py
new file mode 100644
index 0000000..f3c1e1d
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/stack_advisor.py
@@ -0,0 +1,519 @@
+#!/usr/bin/env ambari-python-wrap
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import re
+import socket
+import sys
+
+from stack_advisor import StackAdvisor
+
+class HDP132StackAdvisor(StackAdvisor):
+
+  def recommendComponentLayout(self, services, hosts):
+    """
+    Returns Services object with hostnames array populated for components
+    If hostnames are populated for some components (partial blueprint) - these components will not be processed
+    """
+    stackName = services["Versions"]["stack_name"]
+    stackVersion = services["Versions"]["stack_version"]
+    hostsList = [host["Hosts"]["host_name"] for host in hosts["items"]]
+    servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
+
+    recommendations = {
+      "Versions": {"stack_name": stackName, "stack_version": stackVersion},
+      "hosts": hostsList,
+      "services": servicesList,
+      "recommendations": {
+        "blueprint": {
+          "host_groups": [ ]
+        },
+        "blueprint_cluster_binding": {
+          "host_groups": [ ]
+        }
+      }
+    }
+
+    hostsComponentsMap = {}
+
+    #extend 'hostsComponentsMap' with MASTER components
+    for service in services["services"]:
+      masterComponents = [component for component in service["components"] if isMaster(component)]
+      for component in masterComponents:
+        componentName = component["StackServiceComponents"]["component_name"]
+        hostsForComponent = []
+
+        if isAlreadyPopulated(component):
+          hostsForComponent = component["StackServiceComponents"]["hostnames"]
+        else:
+          availableHosts = hostsList
+          if len(hostsList) > 1 and self.isNotPreferableOnAmbariServerHost(component):
+            availableHosts = [hostName for hostName in hostsList if not isLocalHost(hostName)]
+
+          if isMasterWithMultipleInstances(component):
+            hostsCount = defaultNoOfMasterHosts(component)
+            if hostsCount > 1: # get first 'hostsCount' available hosts
+              if len(availableHosts) < hostsCount:
+                hostsCount = len(availableHosts)
+              hostsForComponent = availableHosts[:hostsCount]
+            else:
+              hostsForComponent = [self.getHostForComponent(component, availableHosts)]
+          else:
+            hostsForComponent = [self.getHostForComponent(component, availableHosts)]
+
+        #extend 'hostsComponentsMap' with 'hostsForComponent'
+        for hostName in hostsForComponent:
+          if hostName not in hostsComponentsMap:
+            hostsComponentsMap[hostName] = []
+          hostsComponentsMap[hostName].append( { "name":componentName } )
+
+    #extend 'hostsComponentsMap' with Slave and Client Components
+    componentsListList = [service["components"] for service in services["services"]]
+    componentsList = [item for sublist in componentsListList for item in sublist]
+    usedHostsListList = [component["StackServiceComponents"]["hostnames"] for component in componentsList if not self.isNotValuable(component)]
+    utilizedHosts = [item for sublist in usedHostsListList for item in sublist]
+    freeHosts = [hostName for hostName in hostsList if hostName not in utilizedHosts]
+
+    for service in services["services"]:
+      slaveClientComponents = [component for component in service["components"] if isSlave(component) or isClient(component)]
+      for component in slaveClientComponents:
+        componentName = component["StackServiceComponents"]["component_name"]
+        hostsForComponent = []
+
+        if isAlreadyPopulated(component):
+          hostsForComponent = component["StackServiceComponents"]["hostnames"]
+        elif component["StackServiceComponents"]["cardinality"] == "ALL":
+          hostsForComponent = hostsList
+        else:
+          if len(freeHosts) == 0:
+            hostsForComponent = hostsList[-1:]
+          else: # len(freeHosts) >= 1
+            hostsForComponent = freeHosts
+            if isClient(component):
+              hostsForComponent = freeHosts[0:1]
+
+        #extend 'hostsComponentsMap' with 'hostsForComponent'
+        for hostName in hostsForComponent:
+          if hostName not in hostsComponentsMap:
+            hostsComponentsMap[hostName] = []
+          hostsComponentsMap[hostName].append( { "name": componentName } )
+
+    #prepare 'host-group's from 'hostsComponentsMap'
+    host_groups = recommendations["recommendations"]["blueprint"]["host_groups"]
+    bindings = recommendations["recommendations"]["blueprint_cluster_binding"]["host_groups"]
+    index = 0
+    for key in hostsComponentsMap.keys():
+      index += 1
+      host_group_name = "host-group-{0}".format(index)
+      host_groups.append( { "name": host_group_name, "components": hostsComponentsMap[key] } )
+      bindings.append( { "name": host_group_name, "hosts": [{ "fqdn": socket.getfqdn(key) }] } )
+
+    return recommendations
+  pass
+
+  def getHostForComponent(self, component, hostsList):
+    componentName = component["StackServiceComponents"]["component_name"]
+    scheme = self.defineSelectionScheme(componentName)
+
+    if len(hostsList) == 1:
+      return hostsList[0]
+    else:
+      for key in scheme.keys():
+        if isinstance(key, ( int, long )):
+          if len(hostsList) < key:
+            return hostsList[scheme[key]]
+      return hostsList[scheme['else']]
+
+  def defineSelectionScheme(self, componentName):
+    scheme = self.selectionScheme(componentName)
+    if scheme is None:
+      scheme = {"else": 0}
+    return scheme
+
+  def selectionScheme(self, componentName):
+    return {
+      'NAMENODE': {"else": 0},
+      'SECONDARY_NAMENODE': {"else": 1},
+      'HBASE_MASTER': {6: 0, 31: 2, "else": 3},
+
+      'HISTORYSERVER': {31: 1, "else": 2},
+      'RESOURCEMANAGER': {31: 1, "else": 2},
+
+      'OOZIE_SERVER': {6: 1, 31: 2, "else": 3},
+
+      'HIVE_SERVER': {6: 1, 31: 2, "else": 4},
+      'HIVE_METASTORE': {6: 1, 31: 2, "else": 4},
+      'WEBHCAT_SERVER': {6: 1, 31: 2, "else": 4},
+      }.get(componentName, None)
+
+  def isNotPreferableOnAmbariServerHost(self, component):
+    componentName = component["StackServiceComponents"]["component_name"]
+    service = ['GANGLIA_SERVER', 'NAGIOS_SERVER']
+    return componentName in service
+
+  def validateComponentLayout(self, services, hosts):
+    """Returns array of Validation objects about issues with hostnames components assigned to"""
+    stackName = services["Versions"]["stack_name"]
+    stackVersion = services["Versions"]["stack_version"]
+
+    validations = {
+      "Versions": {"stack_name": stackName, "stack_version": stackVersion},
+      "items": [ ]
+    }
+    items = validations["items"]
+
+    # Validating NAMENODE and SECONDARY_NAMENODE are on different hosts if possible
+    hostsList = [host["Hosts"]["host_name"] for host in hosts["items"]]
+    hostsCount = len(hostsList)
+    servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
+
+    componentsListList = [service["components"] for service in services["services"]]
+    componentsList = [item for sublist in componentsListList for item in sublist]
+    nameNodeHosts = [component["StackServiceComponents"]["hostnames"] for component in componentsList if component["StackServiceComponents"]["component_name"] == "NAMENODE"]
+    secondaryNameNodeHosts = [component["StackServiceComponents"]["hostnames"] for component in componentsList if component["StackServiceComponents"]["component_name"] == "SECONDARY_NAMENODE"]
+
+    if hostsCount > 1 and len(nameNodeHosts) > 0 and len(secondaryNameNodeHosts) > 0:
+      nameNodeHosts = nameNodeHosts[0]
+      secondaryNameNodeHosts = secondaryNameNodeHosts[0]
+      commonHosts = list(set(nameNodeHosts).intersection(secondaryNameNodeHosts))
+      for host in commonHosts:
+        items.append( { "type": 'host-component', "level": 'WARN', "message": 'NameNode and Secondary NameNode cannot be hosted on same machine', "component-name": 'NAMENODE', "host": str(host) } )
+        items.append( { "type": 'host-component', "level": 'WARN', "message": 'NameNode and Secondary NameNode cannot be hosted on same machine', "component-name": 'SECONDARY_NAMENODE', "host": str(host) } )
+
+    # Validating cardinality
+    for component in componentsList:
+      if component["StackServiceComponents"]["cardinality"] is not None:
+         componentName = component["StackServiceComponents"]["component_name"]
+         componentHostsCount = 0
+         if component["StackServiceComponents"]["hostnames"] is not None:
+           componentHostsCount = len(component["StackServiceComponents"]["hostnames"])
+         cardinality = str(component["StackServiceComponents"]["cardinality"])
+         # cardinality types: null, 1+, 1-2, 1, ALL
+         hostsMax = -sys.maxint - 1
+         hostsMin = sys.maxint
+         hostsMin = 0
+         hostsMax = 0
+         if "+" in cardinality:
+           hostsMin = int(cardinality[:-1])
+           hostsMax = sys.maxint
+         elif "-" in cardinality:
+           nums = cardinality.split("-")
+           hostsMin = int(nums[0])
+           hostsMax = int(nums[1])
+         elif "ALL" == cardinality:
+           hostsMin = hostsCount
+           hostsMax = hostsCount
+         else:
+           hostsMin = int(cardinality)
+           hostsMax = int(cardinality)
+
+         if componentHostsCount > hostsMax or componentHostsCount < hostsMin:
+           items.append( { "type": 'host-component', "level": 'ERROR', "message": 'Cardinality violation, cardinality={0}, hosts count={1}'.format(cardinality, str(componentHostsCount)), "component-name": str(componentName) } )
+
+    # Validating host-usage
+    usedHostsListList = [component["StackServiceComponents"]["hostnames"] for component in componentsList if not self.isNotValuable(component)]
+    usedHostsList = [item for sublist in usedHostsListList for item in sublist]
+    nonUsedHostsList = [item for item in hostsList if item not in usedHostsList]
+    for host in nonUsedHostsList:
+      items.append( { "type": 'host-component', "level": 'ERROR', "message": 'Host is not used', "host": str(host) } )
+
+    return validations
+  pass
+
+  def isNotValuable(self, component):
+    componentName = component["StackServiceComponents"]["component_name"]
+    service = ['JOURNALNODE', 'ZKFC', 'GANGLIA_MONITOR']
+    return componentName in service
+
+  def recommendConfigurations(self, services, hosts):
+    stackName = services["Versions"]["stack_name"]
+    stackVersion = services["Versions"]["stack_version"]
+    hostsList = [host["Hosts"]["host_name"] for host in hosts["items"]]
+    servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
+    components = [component["StackServiceComponents"]["component_name"]
+                  for service in services["services"]
+                  for component in service["components"]]
+
+    clusterData = self.getClusterData(servicesList, hosts, components)
+
+    recommendations = {
+      "Versions": {"stack_name": stackName, "stack_version": stackVersion},
+      "hosts": hostsList,
+      "services": servicesList,
+      "recommendations": {
+        "blueprint": {
+          "configurations": {},
+          "host_groups": []
+        },
+        "blueprint_cluster_binding": {
+          "host_groups": []
+        }
+      }
+    }
+
+    configurations = recommendations["recommendations"]["blueprint"]["configurations"]
+
+    for service in servicesList:
+      calculation = self.recommendServiceConfigurations(service)
+      if calculation is not None:
+        calculation(configurations, clusterData)
+
+    return recommendations
+
+  def recommendServiceConfigurations(self, service):
+    return {
+    }.get(service, None)
+
+  def putProperty(self, config, configType):
+    config[configType] = {"properties": {}}
+    def appendProperty(key, value):
+      config[configType]["properties"][key] = str(value)
+    return appendProperty
+
+  def getClusterData(self, servicesList, hosts, components):
+
+    hBaseInstalled = False
+    if 'HBASE' in servicesList:
+      hBaseInstalled = True
+
+    cluster = {
+      "cpu": 0,
+      "disk": 0,
+      "ram": 0,
+      "hBaseInstalled": hBaseInstalled,
+      "components": components
+    }
+
+    if len(hosts["items"]) > 0:
+      host = hosts["items"][0]["Hosts"]
+      cluster["cpu"] = host["cpu_count"]
+      cluster["disk"] = len(host["disk_info"])
+      cluster["ram"] = int(host["total_mem"] / (1024 * 1024))
+
+    ramRecommendations = [
+      {"os":1, "hbase":1},
+      {"os":2, "hbase":1},
+      {"os":2, "hbase":2},
+      {"os":4, "hbase":4},
+      {"os":6, "hbase":8},
+      {"os":8, "hbase":8},
+      {"os":8, "hbase":8},
+      {"os":12, "hbase":16},
+      {"os":24, "hbase":24},
+      {"os":32, "hbase":32},
+      {"os":64, "hbase":64}
+    ]
+    index = {
+      cluster["ram"] <= 4: 0,
+      4 < cluster["ram"] <= 8: 1,
+      8 < cluster["ram"] <= 16: 2,
+      16 < cluster["ram"] <= 24: 3,
+      24 < cluster["ram"] <= 48: 4,
+      48 < cluster["ram"] <= 64: 5,
+      64 < cluster["ram"] <= 72: 6,
+      72 < cluster["ram"] <= 96: 7,
+      96 < cluster["ram"] <= 128: 8,
+      128 < cluster["ram"] <= 256: 9,
+      256 < cluster["ram"]: 10
+    }[1]
+    cluster["reservedRam"] = ramRecommendations[index]["os"]
+    cluster["hbaseRam"] = ramRecommendations[index]["hbase"]
+
+    cluster["minContainerSize"] = {
+      cluster["ram"] <= 4: 256,
+      4 < cluster["ram"] <= 8: 512,
+      8 < cluster["ram"] <= 24: 1024,
+      24 < cluster["ram"]: 2048
+    }[1]
+
+    '''containers = max(3, min (2*cores,min (1.8*DISKS,(Total available RAM) / MIN_CONTAINER_SIZE))))'''
+    cluster["containers"] = max(3,
+                                min(2 * cluster["cpu"],
+                                    int(min(1.8 * cluster["disk"],
+                                            cluster["ram"] / cluster["minContainerSize"]))))
+
+    '''ramPerContainers = max(2GB, RAM - reservedRam - hBaseRam) / containers'''
+    cluster["ramPerContainer"] = max(2048,
+                                     cluster["ram"] - cluster["reservedRam"] - cluster["hbaseRam"])
+    cluster["ramPerContainer"] /= cluster["containers"]
+    '''If greater than 1GB, value will be in multiples of 512.'''
+    if cluster["ramPerContainer"] > 1024:
+      cluster["ramPerContainer"] = ceil(cluster["ramPerContainer"] / 512) * 512
+
+    cluster["mapMemory"] = int(cluster["ramPerContainer"])
+    cluster["reduceMemory"] = cluster["ramPerContainer"]
+    cluster["amMemory"] = max(cluster["mapMemory"], cluster["reduceMemory"])
+
+    return cluster
+
+
+  def validateConfigurations(self, services, hosts):
+    """Returns array of Validation objects about issues with configuration values provided in services"""
+    stackName = services["Versions"]["stack_name"]
+    stackVersion = services["Versions"]["stack_version"]
+
+    validations = {
+      "Versions": {"stack_name": stackName, "stack_version": stackVersion},
+      "items": [ ]
+    }
+    items = validations["items"]
+
+    recommendations = self.recommendConfigurations(services, hosts)
+    recommendedDefaults = recommendations["recommendations"]["blueprint"]["configurations"]
+
+    configurations = services["configurations"]
+    for service in services["services"]:
+      serviceName = service["StackServices"]["service_name"]
+      validator = self.validateServiceConfigurations(serviceName)
+      if validator is not None:
+        siteName = validator[0]
+        method = validator[1]
+        if siteName in recommendedDefaults:
+          siteProperties = getSiteProperties(configurations, siteName)
+          if siteProperties is not None:
+            resultItems = method(siteProperties, recommendedDefaults[siteName]["properties"])
+            items.extend(resultItems)
+    return validations
+    pass
+
+  def validateServiceConfigurations(self, serviceName):
+    return {
+    }.get(serviceName, None)
+
+  def toConfigurationValidationErrors(self, items, siteName):
+    result = []
+    for item in items:
+      if item["message"] is not None:
+        error = { "type": 'configuration', "level": 'ERROR', "message": item["message"], "config-type": siteName, "config-name": item["config-name"] }
+        result.append(error)
+    return result
+
+  def validatorLessThenDefaultValue(self, properties, recommendedDefaults, propertyName):
+    if not propertyName in properties:
+      return "Value should be set"
+    value = to_number(properties[propertyName])
+    if value is None:
+      return "Value should be integer"
+    defaultValue = to_number(recommendedDefaults[propertyName])
+    if defaultValue is None:
+      return None
+    if value < defaultValue:
+      return "Value is less than the recommended default of {0}".format(defaultValue)
+    return None
+
+  def validateXmxValue(self, properties, recommendedDefaults, propertyName):
+    if not propertyName in properties:
+      return "Value should be set"
+    value = properties[propertyName]
+    defaultValue = recommendedDefaults[propertyName]
+    if defaultValue is None:
+      return "Config's default value can't be null or undefined"
+    if not checkXmxValueFormat(value):
+      return 'Invalid value format'
+    valueInt = formatXmxSizeToBytes(getXmxSize(value))
+    defaultValueXmx = getXmxSize(defaultValue)
+    defaultValueInt = formatXmxSizeToBytes(defaultValueXmx)
+    if valueInt < defaultValueInt:
+      return "Value is less than the recommended default of -Xmx" + defaultValueXmx
+    return None
+
+
+# Validation helper methods
+def getSiteProperties(configurations, siteName):
+  siteConfig = configurations.get(siteName)
+  if siteConfig is None:
+    return None
+  return siteConfig.get("properties")
+
+def to_number(s):
+  try:
+    return int(re.sub("\D", "", s))
+  except ValueError:
+    return None
+
+def checkXmxValueFormat(value):
+  p = re.compile('-Xmx(\d+)(b|k|m|g|p|t|B|K|M|G|P|T)?')
+  matches = p.findall(value)
+  return len(matches) == 1
+
+def getXmxSize(value):
+  p = re.compile("-Xmx(\d+)(.?)")
+  result = p.findall(value)[0]
+  if len(result) > 1:
+    # result[1] - is a space or size formatter (b|k|m|g etc)
+    return result[0] + result[1].lower()
+  return result[0]
+
+def formatXmxSizeToBytes(value):
+  value = value.lower()
+  if len(value) == 0:
+    return 0
+  modifier = value[-1]
+
+  if modifier == ' ' or modifier in "0123456789":
+    modifier = 'b'
+  m = {
+    modifier == 'b': 1,
+    modifier == 'k': 1024,
+    modifier == 'm': 1024 * 1024,
+    modifier == 'g': 1024 * 1024 * 1024,
+    modifier == 't': 1024 * 1024 * 1024 * 1024,
+    modifier == 'p': 1024 * 1024 * 1024 * 1024 * 1024
+    }[1]
+  return to_number(value) * m
+
+
+# Recommendation helper methods
+def isAlreadyPopulated(component):
+  if component["StackServiceComponents"]["hostnames"] is not None:
+    return len(component["StackServiceComponents"]["hostnames"]) > 0
+  return False
+
+def isClient(component):
+  return component["StackServiceComponents"]["component_category"] == 'CLIENT'
+
+def isSlave(component):
+  componentName = component["StackServiceComponents"]["component_name"]
+  isSlave = component["StackServiceComponents"]["component_category"] == 'SLAVE'
+  return isSlave
+
+def isMaster(component):
+  componentName = component["StackServiceComponents"]["component_name"]
+  isMaster = component["StackServiceComponents"]["is_master"]
+  return isMaster
+
+def isLocalHost(hostName):
+  return socket.getfqdn(hostName) == socket.getfqdn()
+
+def isMasterWithMultipleInstances(component):
+  componentName = component["StackServiceComponents"]["component_name"]
+  masters = ['ZOOKEEPER_SERVER', 'HBASE_MASTER']
+  return componentName in masters
+
+def defaultNoOfMasterHosts(component):
+  componentName = component["StackServiceComponents"]["component_name"]
+  return cardinality(componentName)[min]
+
+
+# Helper dictionaries
+def cardinality(componentName):
+  return {
+    'ZOOKEEPER_SERVER': {min: 3},
+    'HBASE_MASTER': {min: 1},
+    }.get(componentName, {min:1, max:1})
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/fb1e0ca5/ambari-server/src/main/resources/stacks/HDP/1.3.3/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.3/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/1.3.3/services/stack_advisor.py
new file mode 100644
index 0000000..f91efd8
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.3/services/stack_advisor.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env ambari-python-wrap
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import socket
+
+from stack_advisor import StackAdvisor
+
+class HDP133StackAdvisor(HDP132StackAdvisor):
+  pass
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/fb1e0ca5/ambari-server/src/main/resources/stacks/HDP/1.3/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/1.3/services/stack_advisor.py
new file mode 100644
index 0000000..998ecaa
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3/services/stack_advisor.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env ambari-python-wrap
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import socket
+
+from stack_advisor import StackAdvisor
+
+class HDP13StackAdvisor(HDP133StackAdvisor):
+  pass
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/fb1e0ca5/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
index 53a78eb..37d1af4 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
@@ -435,6 +435,15 @@ public class AmbariMetaInfoTest {
   }
 
   @Test
+  public void testGetStackParentVersions() throws Exception {
+    List<String> parents = metaInfo.getStackParentVersions(STACK_NAME_HDP, "2.0.8");
+    Assert.assertEquals(3, parents.size());
+    Assert.assertEquals("2.0.7", parents.get(0));
+    Assert.assertEquals("2.0.6", parents.get(1));
+    Assert.assertEquals("2.0.5", parents.get(2));
+  }
+
+  @Test
   public void testGetProperties() throws Exception {
     Set<PropertyInfo> properties = metaInfo.getProperties(STACK_NAME_HDP, STACK_VERSION_HDP, SERVICE_NAME_HDFS);
     Assert.assertEquals(properties.size(), PROPERTIES_CNT);

http://git-wip-us.apache.org/repos/asf/ambari/blob/fb1e0ca5/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelperTest.java
index e4cf97a..87729b1 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelperTest.java
@@ -27,6 +27,7 @@ import static org.mockito.Mockito.when;
 
 import java.io.IOException;
 
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest.StackAdvisorRequestBuilder;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest.StackAdvisorRequestType;
 import org.apache.ambari.server.api.services.stackadvisor.commands.GetComponentLayoutRecommnedationCommand;
@@ -48,7 +49,8 @@ public class StackAdvisorHelperTest {
   public void testValidate_returnsCommandResult() throws StackAdvisorException, IOException {
     Configuration configuration = mock(Configuration.class);
     StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
-    StackAdvisorHelper helper = spy(new StackAdvisorHelper(configuration, saRunner));
+    AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
+    StackAdvisorHelper helper = spy(new StackAdvisorHelper(configuration, saRunner, metaInfo));
 
     StackAdvisorCommand<ValidationResponse> command = mock(StackAdvisorCommand.class);
     ValidationResponse expected = mock(ValidationResponse.class);
@@ -69,7 +71,8 @@ public class StackAdvisorHelperTest {
       IOException {
     Configuration configuration = mock(Configuration.class);
     StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
-    StackAdvisorHelper helper = spy(new StackAdvisorHelper(configuration, saRunner));
+    AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
+    StackAdvisorHelper helper = spy(new StackAdvisorHelper(configuration, saRunner, metaInfo));
 
     StackAdvisorCommand<ValidationResponse> command = mock(StackAdvisorCommand.class);
     StackAdvisorRequestType requestType = StackAdvisorRequestType.HOST_GROUPS;
@@ -88,7 +91,8 @@ public class StackAdvisorHelperTest {
   public void testRecommend_returnsCommandResult() throws StackAdvisorException, IOException {
     Configuration configuration = mock(Configuration.class);
     StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
-    StackAdvisorHelper helper = spy(new StackAdvisorHelper(configuration, saRunner));
+    AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
+    StackAdvisorHelper helper = spy(new StackAdvisorHelper(configuration, saRunner, metaInfo));
 
     StackAdvisorCommand<RecommendationResponse> command = mock(StackAdvisorCommand.class);
     RecommendationResponse expected = mock(RecommendationResponse.class);
@@ -109,7 +113,8 @@ public class StackAdvisorHelperTest {
       IOException {
     Configuration configuration = mock(Configuration.class);
     StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
-    StackAdvisorHelper helper = spy(new StackAdvisorHelper(configuration, saRunner));
+    AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
+    StackAdvisorHelper helper = spy(new StackAdvisorHelper(configuration, saRunner, metaInfo));
 
     StackAdvisorCommand<RecommendationResponse> command = mock(StackAdvisorCommand.class);
     StackAdvisorRequestType requestType = StackAdvisorRequestType.HOST_GROUPS;
@@ -128,7 +133,8 @@ public class StackAdvisorHelperTest {
       throws IOException, StackAdvisorException {
     Configuration configuration = mock(Configuration.class);
     StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
-    StackAdvisorHelper helper = new StackAdvisorHelper(configuration, saRunner);
+    AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
+    StackAdvisorHelper helper = new StackAdvisorHelper(configuration, saRunner, metaInfo);
     StackAdvisorRequestType requestType = StackAdvisorRequestType.HOST_GROUPS;
 
     StackAdvisorCommand<RecommendationResponse> command = helper
@@ -142,7 +148,8 @@ public class StackAdvisorHelperTest {
       throws IOException, StackAdvisorException {
     Configuration configuration = mock(Configuration.class);
     StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
-    StackAdvisorHelper helper = new StackAdvisorHelper(configuration, saRunner);
+    AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
+    StackAdvisorHelper helper = new StackAdvisorHelper(configuration, saRunner, metaInfo);
     StackAdvisorRequestType requestType = StackAdvisorRequestType.HOST_GROUPS;
 
     StackAdvisorCommand<ValidationResponse> command = helper.createValidationCommand(requestType);
@@ -155,7 +162,8 @@ public class StackAdvisorHelperTest {
       throws IOException, StackAdvisorException {
     Configuration configuration = mock(Configuration.class);
     StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
-    StackAdvisorHelper helper = new StackAdvisorHelper(configuration, saRunner);
+    AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
+    StackAdvisorHelper helper = new StackAdvisorHelper(configuration, saRunner, metaInfo);
     StackAdvisorRequestType requestType = StackAdvisorRequestType.CONFIGURATIONS;
 
     StackAdvisorCommand<ValidationResponse> command = helper.createValidationCommand(requestType);

http://git-wip-us.apache.org/repos/asf/ambari/blob/fb1e0ca5/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java
index 5e4e3d0..cf792ff 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java
@@ -19,8 +19,10 @@
 package org.apache.ambari.server.api.services.stackadvisor.commands;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.mock;
@@ -29,16 +31,23 @@ import static org.mockito.Mockito.when;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Iterator;
 
 import javax.ws.rs.WebApplicationException;
 
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorException;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest.StackAdvisorRequestBuilder;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRunner;
 import org.apache.ambari.server.api.services.stackadvisor.commands.StackAdvisorCommand.StackAdvisorData;
 import org.apache.commons.io.FileUtils;
+import org.codehaus.jackson.JsonNode;
 import org.codehaus.jackson.annotate.JsonProperty;
+import org.codehaus.jackson.node.ArrayNode;
+import org.codehaus.jackson.node.ObjectNode;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -68,8 +77,10 @@ public class StackAdvisorCommandTest {
     String stackAdvisorScript = "echo";
     int requestId = 0;
     StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
+    AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
+    doReturn(Collections.emptyList()).when(metaInfo).getStackParentVersions(anyString(), anyString());
     StackAdvisorCommand<TestResource> command = spy(new TestStackAdvisorCommand(recommendationsDir,
-        stackAdvisorScript, requestId, saRunner));
+        stackAdvisorScript, requestId, saRunner, metaInfo));
 
     StackAdvisorRequest request = StackAdvisorRequestBuilder.forStack("stackName", "stackVersion")
         .build();
@@ -86,8 +97,10 @@ public class StackAdvisorCommandTest {
     String stackAdvisorScript = "echo";
     int requestId = 0;
     StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
+    AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
+    doReturn(Collections.emptyList()).when(metaInfo).getStackParentVersions(anyString(), anyString());
     StackAdvisorCommand<TestResource> command = spy(new TestStackAdvisorCommand(recommendationsDir,
-        stackAdvisorScript, requestId, saRunner));
+        stackAdvisorScript, requestId, saRunner, metaInfo));
 
     StackAdvisorRequest request = StackAdvisorRequestBuilder.forStack("stackName", "stackVersion")
         .build();
@@ -112,8 +125,10 @@ public class StackAdvisorCommandTest {
     String stackAdvisorScript = "echo";
     int requestId = 0;
     StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
+    AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
+    doReturn(Collections.emptyList()).when(metaInfo).getStackParentVersions(anyString(), anyString());
     StackAdvisorCommand<TestResource> command = spy(new TestStackAdvisorCommand(recommendationsDir,
-        stackAdvisorScript, requestId, saRunner));
+        stackAdvisorScript, requestId, saRunner, metaInfo));
 
     StackAdvisorRequest request = StackAdvisorRequestBuilder.forStack("stackName", "stackVersion")
         .build();
@@ -137,8 +152,10 @@ public class StackAdvisorCommandTest {
     String stackAdvisorScript = "echo";
     final int requestId = 0;
     StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
+    AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
+    doReturn(Collections.emptyList()).when(metaInfo).getStackParentVersions(anyString(), anyString());
     final StackAdvisorCommand<TestResource> command = spy(new TestStackAdvisorCommand(
-        recommendationsDir, stackAdvisorScript, requestId, saRunner));
+        recommendationsDir, stackAdvisorScript, requestId, saRunner, metaInfo));
 
     StackAdvisorRequest request = StackAdvisorRequestBuilder.forStack("stackName", "stackVersion")
         .build();
@@ -165,10 +182,61 @@ public class StackAdvisorCommandTest {
     assertEquals(expected, result.getType());
   }
 
+  @Test
+  public void testPopulateStackHierarchy() throws Exception {
+    File file = mock(File.class);
+    StackAdvisorRunner stackAdvisorRunner = mock(StackAdvisorRunner.class);
+    AmbariMetaInfo ambariMetaInfo = mock(AmbariMetaInfo.class);
+    StackAdvisorCommand<TestResource> cmd = new TestStackAdvisorCommand(file, "test", 1,
+        stackAdvisorRunner, ambariMetaInfo);
+    ObjectNode objectNode = (ObjectNode) cmd.mapper.readTree("{\"Versions\": " +
+        "{\"stack_name\": \"stack\", \"stack_version\":\"1.0.0\"}}");
+
+    doReturn(Arrays.asList("0.9", "0.8")).when(ambariMetaInfo).getStackParentVersions("stack", "1.0.0");
+
+    cmd.populateStackHierarchy(objectNode);
+
+    JsonNode stackHierarchy = objectNode.get("Versions").get("stack_hierarchy");
+    assertNotNull(stackHierarchy);
+    JsonNode stackName = stackHierarchy.get("stack_name");
+    assertNotNull(stackName);
+    assertEquals("stack", stackName.asText());
+    ArrayNode stackVersions = (ArrayNode) stackHierarchy.get("stack_versions");
+    assertNotNull(stackVersions);
+    assertEquals(2, stackVersions.size());
+    Iterator<JsonNode> stackVersionsElements = stackVersions.getElements();
+    assertEquals("0.9", stackVersionsElements.next().asText());
+    assertEquals("0.8", stackVersionsElements.next().asText());
+  }
+
+  @Test
+  public void testPopulateStackHierarchy_noParents() throws Exception {
+    File file = mock(File.class);
+    StackAdvisorRunner stackAdvisorRunner = mock(StackAdvisorRunner.class);
+    AmbariMetaInfo ambariMetaInfo = mock(AmbariMetaInfo.class);
+    StackAdvisorCommand<TestResource> cmd = new TestStackAdvisorCommand(file, "test", 1,
+        stackAdvisorRunner, ambariMetaInfo);
+    ObjectNode objectNode = (ObjectNode) cmd.mapper.readTree("{\"Versions\": " +
+        "{\"stack_name\": \"stack\", \"stack_version\":\"1.0.0\"}}");
+
+    doReturn(Collections.emptyList()).when(ambariMetaInfo).getStackParentVersions("stack", "1.0.0");
+
+    cmd.populateStackHierarchy(objectNode);
+
+    JsonNode stackHierarchy = objectNode.get("Versions").get("stack_hierarchy");
+    assertNotNull(stackHierarchy);
+    JsonNode stackName = stackHierarchy.get("stack_name");
+    assertNotNull(stackName);
+    assertEquals("stack", stackName.asText());
+    ArrayNode stackVersions = (ArrayNode) stackHierarchy.get("stack_versions");
+    assertNotNull(stackVersions);
+    assertEquals(0, stackVersions.size());
+  }
+
   class TestStackAdvisorCommand extends StackAdvisorCommand<TestResource> {
     public TestStackAdvisorCommand(File recommendationsDir, String stackAdvisorScript,
-        int requestId, StackAdvisorRunner saRunner) {
-      super(recommendationsDir, stackAdvisorScript, requestId, saRunner);
+        int requestId, StackAdvisorRunner saRunner, AmbariMetaInfo metaInfo) {
+      super(recommendationsDir, stackAdvisorScript, requestId, saRunner, metaInfo);
     }
 
     @Override


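A note on the testPopulateStackHierarchy assertions above: they pin down the JSON shape the command is expected to inject into the stack advisor request. The following standalone sketch is an illustration only (it is not Ambari code; it only assumes the org.codehaus.jackson 1.x API that the test itself imports) and builds the node the assertions describe for parent versions "0.9" and "0.8":

    import org.codehaus.jackson.map.ObjectMapper;
    import org.codehaus.jackson.node.ArrayNode;
    import org.codehaus.jackson.node.ObjectNode;

    public class StackHierarchySketch {
      public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Same starting document as the test uses.
        ObjectNode root = (ObjectNode) mapper.readTree(
            "{\"Versions\": {\"stack_name\": \"stack\", \"stack_version\": \"1.0.0\"}}");

        // Shape asserted by testPopulateStackHierarchy: a stack_hierarchy object
        // under Versions, holding the stack name and its ordered parent versions.
        ObjectNode versions = (ObjectNode) root.get("Versions");
        ObjectNode hierarchy = versions.putObject("stack_hierarchy");
        hierarchy.put("stack_name", "stack");
        ArrayNode parents = hierarchy.putArray("stack_versions");
        parents.add("0.9");
        parents.add("0.8");

        // Prints (field order follows insertion order):
        // {"Versions":{"stack_name":"stack","stack_version":"1.0.0",
        //  "stack_hierarchy":{"stack_name":"stack","stack_versions":["0.9","0.8"]}}}
        System.out.println(mapper.writeValueAsString(root));
      }
    }

The empty stack_versions array checked by testPopulateStackHierarchy_noParents corresponds to the same structure with no elements added to the array.
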
[21/35] git commit: AMBARI-6496. Additional fix: kerberization needs the hdfs_principal_name property to be saved in the hadoop-env section of the command file, otherwise it fails. (Alejandro Fernandez via swagle)

Posted by jo...@apache.org.
AMBARI-6496. Additional fix: kerberization needs the hdfs_principal_name property to be saved in the hadoop-env section of the command file, otherwise it fails. (Alejandro Fernandez via swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e984e9b8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e984e9b8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e984e9b8

Branch: refs/heads/branch-alerts-dev
Commit: e984e9b844721efd8eef8ddf781c9ff74486d85c
Parents: 6237724
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Tue Aug 26 11:22:01 2014 -0700
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Tue Aug 26 11:22:01 2014 -0700

----------------------------------------------------------------------
 ambari-web/app/data/HDP2/secure_properties.js | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e984e9b8/ambari-web/app/data/HDP2/secure_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2/secure_properties.js b/ambari-web/app/data/HDP2/secure_properties.js
index 6904d6e..8b93c6e 100644
--- a/ambari-web/app/data/HDP2/secure_properties.js
+++ b/ambari-web/app/data/HDP2/secure_properties.js
@@ -123,6 +123,7 @@ module.exports =
       "isOverridable": false,
       "isReconfigurable": false,
       "serviceName": "GENERAL",
+      "filename": "hadoop-env.xml",
       "category": "AMBARI"
     },
     {


[22/35] git commit: AMBARI-6010. "No such service ambari-agent" when trying to use service cmd on agent on SUSE (dlysnichenko)

Posted by jo...@apache.org.
AMBARI-6010. "No such service ambari-agent" when trying to use service cmd on agent on SUSE (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fb9d884b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fb9d884b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fb9d884b

Branch: refs/heads/branch-alerts-dev
Commit: fb9d884be4ea4257831d591cd0e4f19c87321e69
Parents: e984e9b
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Tue Aug 26 21:26:28 2014 +0300
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Tue Aug 26 21:26:28 2014 +0300

----------------------------------------------------------------------
 ambari-agent/pom.xml | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/fb9d884b/ambari-agent/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-agent/pom.xml b/ambari-agent/pom.xml
index ebf30fa..a487aa7 100644
--- a/ambari-agent/pom.xml
+++ b/ambari-agent/pom.xml
@@ -49,6 +49,7 @@
     <ambari.server.module>../ambari-server</ambari.server.module>
     <target.cache.dir>${project.build.directory}/cache/</target.cache.dir>
     <resource.keeper.script>${ambari.server.module}/src/main/python/ambari_server/resourceFilesKeeper.py</resource.keeper.script>
+    <init.d.dir>/etc/rc.d/init.d</init.d.dir>
   </properties>
   <build>
     <plugins>
@@ -349,7 +350,7 @@
               <groupname>root</groupname>
             </mapping>
             <mapping>
-              <directory>/etc/rc.d/init.d</directory>
+              <directory>${init.d.dir}</directory>
               <filemode>755</filemode>
               <username>root</username>
               <groupname>root</groupname>
@@ -666,4 +667,12 @@
       </extension>
     </extensions>
   </build>
+  <profiles>
+    <profile>
+      <id>suse11</id>
+      <properties>
+        <init.d.dir>/etc/init.d</init.d.dir>
+      </properties>
+    </profile>
+  </profiles>
 </project>


[18/35] git commit: AMBARI-7002 - Views: Creating an instance for a view should respond with 409 Conflict if that instance already exists

Posted by jo...@apache.org.
AMBARI-7002 - Views: Creating an instance for a view should respond with 409 Conflict if that instance already exists


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d566bcaf
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d566bcaf
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d566bcaf

Branch: refs/heads/branch-alerts-dev
Commit: d566bcaf6338803fb1a91762333c3b2aec23f1d0
Parents: 6d098ca
Author: tbeerbower <tb...@hortonworks.com>
Authored: Sun Aug 24 21:14:29 2014 -0400
Committer: tbeerbower <tb...@hortonworks.com>
Committed: Tue Aug 26 12:24:40 2014 -0400

----------------------------------------------------------------------
 .../internal/ViewInstanceResourceProvider.java  |  9 +-
 .../apache/ambari/server/view/ViewRegistry.java | 15 ++++
 .../ViewInstanceResourceProviderTest.java       | 89 ++++++++++++++++++--
 3 files changed, 107 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d566bcaf/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProvider.java
index ae02f0a..ce23e79 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProvider.java
@@ -19,6 +19,7 @@
 package org.apache.ambari.server.controller.internal;
 
 import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.DuplicateResourceException;
 import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
 import org.apache.ambari.server.controller.spi.NoSuchResourceException;
 import org.apache.ambari.server.controller.spi.Predicate;
@@ -332,7 +333,13 @@ public class ViewInstanceResourceProvider extends AbstractResourceProvider {
       @Override
       public Void invoke() throws AmbariException {
         try {
-          ViewRegistry.getInstance().installViewInstance(toEntity(properties));
+          ViewRegistry       viewRegistry   = ViewRegistry.getInstance();
+          ViewInstanceEntity instanceEntity = toEntity(properties);
+
+          if (viewRegistry.instanceExists(instanceEntity)) {
+            throw new DuplicateResourceException("The instance " + instanceEntity.getName() + " already exists.");
+          }
+          viewRegistry.installViewInstance(instanceEntity);
         } catch (org.apache.ambari.view.SystemException e) {
           throw new AmbariException("Caught exception trying to create view instance.", e);
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d566bcaf/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
index 7542876..6e5f0a2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
@@ -466,6 +466,21 @@ public class ViewRegistry {
   }
 
   /**
+   * Determine whether or not the given view instance exists.
+   *
+   * @param instanceEntity  the view instance entity
+   *
+   * @return true if the given view instance exists; false otherwise
+   */
+  public boolean instanceExists(ViewInstanceEntity instanceEntity) {
+
+    ViewEntity viewEntity = getDefinition(instanceEntity.getViewName());
+
+    return viewEntity != null &&
+        (getInstanceDefinition(viewEntity.getCommonName(), viewEntity.getVersion(), instanceEntity.getName()) != null);
+  }
+
+  /**
    * Install the given view instance with its associated view.
    *
    * @param instanceEntity  the view instance entity

http://git-wip-us.apache.org/repos/asf/ambari/blob/d566bcaf/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProviderTest.java
index 52f0231..f409f07 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProviderTest.java
@@ -18,21 +18,23 @@
 
 package org.apache.ambari.server.controller.internal;
 
-import org.apache.ambari.server.controller.spi.Predicate;
-import org.apache.ambari.server.controller.spi.Request;
 import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
+import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.orm.entities.ViewEntity;
 import org.apache.ambari.server.orm.entities.ViewInstanceDataEntity;
 import org.apache.ambari.server.orm.entities.ViewInstanceEntity;
 import org.apache.ambari.server.orm.entities.ViewInstancePropertyEntity;
 import org.apache.ambari.server.orm.entities.ViewParameterEntity;
-import org.easymock.EasyMock;
+import org.apache.ambari.server.view.ViewRegistry;
+import org.easymock.Capture;
+import org.junit.Assert;
+import org.junit.Before;
 import org.junit.Test;
 
-import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collection;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
@@ -42,6 +44,17 @@ import static org.easymock.EasyMock.*;
 
 public class ViewInstanceResourceProviderTest {
 
+  private static final ViewRegistry singleton = createMock(ViewRegistry.class);
+
+  static {
+    ViewRegistry.initInstance(singleton);
+  }
+
+  @Before
+  public void before() {
+    reset(singleton);
+  }
+
   @Test
   public void testToResource() throws Exception {
     ViewInstanceResourceProvider provider = new ViewInstanceResourceProvider();
@@ -79,4 +92,70 @@ public class ViewInstanceResourceProviderTest {
     assertEquals("val3", props.get("par3"));
     assertNull(props.get("par2"));
   }
+
+  @Test
+  public void testCreateResources() throws Exception {
+    ViewInstanceResourceProvider provider = new ViewInstanceResourceProvider();
+
+    Set<Map<String, Object>> properties = new HashSet<Map<String, Object>>();
+
+    Map<String, Object> propertyMap = new HashMap<String, Object>();
+
+    propertyMap.put(ViewInstanceResourceProvider.VIEW_NAME_PROPERTY_ID, "V1");
+    propertyMap.put(ViewInstanceResourceProvider.VIEW_VERSION_PROPERTY_ID, "1.0.0");
+    propertyMap.put(ViewInstanceResourceProvider.INSTANCE_NAME_PROPERTY_ID, "I1");
+
+    properties.add(propertyMap);
+
+    ViewInstanceEntity viewInstanceEntity = new ViewInstanceEntity();
+    viewInstanceEntity.setViewName("V1{1.0.0}");
+    viewInstanceEntity.setName("I1");
+
+    expect(singleton.instanceExists(viewInstanceEntity)).andReturn(false);
+    expect(singleton.getInstanceDefinition("V1", "1.0.0", "I1")).andReturn(viewInstanceEntity);
+
+    Capture<ViewInstanceEntity> instanceEntityCapture = new Capture<ViewInstanceEntity>();
+    singleton.installViewInstance(capture(instanceEntityCapture));
+
+    replay(singleton);
+
+    provider.createResources(PropertyHelper.getCreateRequest(properties, null));
+
+    Assert.assertEquals(viewInstanceEntity, instanceEntityCapture.getValue());
+
+    verify(singleton);
+  }
+
+  @Test
+  public void testCreateResources_existingInstance() throws Exception {
+    ViewInstanceResourceProvider provider = new ViewInstanceResourceProvider();
+
+    Set<Map<String, Object>> properties = new HashSet<Map<String, Object>>();
+
+    Map<String, Object> propertyMap = new HashMap<String, Object>();
+
+    propertyMap.put(ViewInstanceResourceProvider.VIEW_NAME_PROPERTY_ID, "V1");
+    propertyMap.put(ViewInstanceResourceProvider.VIEW_VERSION_PROPERTY_ID, "1.0.0");
+    propertyMap.put(ViewInstanceResourceProvider.INSTANCE_NAME_PROPERTY_ID, "I1");
+
+    properties.add(propertyMap);
+
+    ViewInstanceEntity viewInstanceEntity = new ViewInstanceEntity();
+    viewInstanceEntity.setViewName("V1{1.0.0}");
+    viewInstanceEntity.setName("I1");
+
+    expect(singleton.instanceExists(viewInstanceEntity)).andReturn(true);
+    expect(singleton.getInstanceDefinition("V1", "1.0.0", "I1")).andReturn(viewInstanceEntity);
+
+    replay(singleton);
+
+    try {
+      provider.createResources(PropertyHelper.getCreateRequest(properties, null));
+      fail("Expected ResourceAlreadyExistsException.");
+    } catch (ResourceAlreadyExistsException e) {
+      // expected
+    }
+
+    verify(singleton);
+  }
 }
\ No newline at end of file

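The provider change in this commit reduces to a check-then-install guard around installViewInstance(). As a hedged, self-contained illustration (plain Java with simplified stand-in types, not the actual Ambari classes or their real signatures), the pattern the new tests exercise looks roughly like this:

    // Simplified stand-ins; the real Ambari entities carry much more state.
    class DuplicateResourceException extends Exception {
      DuplicateResourceException(String message) { super(message); }
    }

    interface InstanceRegistry {
      boolean instanceExists(String view, String version, String instance);
      void installViewInstance(String view, String version, String instance);
    }

    class ViewInstanceCreator {
      private final InstanceRegistry registry;

      ViewInstanceCreator(InstanceRegistry registry) { this.registry = registry; }

      // Guard from the diff: refuse to install when the instance already exists,
      // so the REST layer can report the failure as 409 Conflict instead of
      // re-installing an existing instance.
      void create(String view, String version, String instance)
          throws DuplicateResourceException {
        if (registry.instanceExists(view, version, instance)) {
          throw new DuplicateResourceException(
              "The instance " + instance + " already exists.");
        }
        registry.installViewInstance(view, version, instance);
      }
    }

In the actual provider the existence check consults ViewRegistry.getInstance(), and testCreateResources_existingInstance asserts that createResources() ultimately surfaces this condition as a ResourceAlreadyExistsException.
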

[24/35] git commit: AMBARI-7018. Stack API should provide display name for services and components. (jaimin)

Posted by jo...@apache.org.
AMBARI-7018. Stack API should provide display name for services and components. (jaimin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/02ee3d44
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/02ee3d44
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/02ee3d44

Branch: refs/heads/branch-alerts-dev
Commit: 02ee3d440596c3e159985830ede4f6fe358a576b
Parents: fb9d884
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Tue Aug 26 12:40:41 2014 -0700
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Tue Aug 26 12:40:53 2014 -0700

----------------------------------------------------------------------
 .../server/api/util/StackExtensionHelper.java   |  9 +++
 .../StackServiceComponentResponse.java          | 33 +++++++++--
 .../server/controller/StackServiceResponse.java | 16 ++++-
 .../StackServiceComponentResourceProvider.java  | 22 +++----
 .../internal/StackServiceResourceProvider.java  | 22 +++----
 .../ambari/server/state/ComponentInfo.java      | 14 ++++-
 .../apache/ambari/server/state/ServiceInfo.java |  9 +++
 .../src/main/resources/properties.json          |  2 +
 .../HDP/1.3.2/services/GANGLIA/metainfo.xml     |  3 +
 .../HDP/1.3.2/services/HBASE/metainfo.xml       |  4 ++
 .../stacks/HDP/1.3.2/services/HDFS/metainfo.xml |  5 ++
 .../stacks/HDP/1.3.2/services/HIVE/metainfo.xml | 11 +++-
 .../HDP/1.3.2/services/MAPREDUCE/metainfo.xml   |  5 ++
 .../HDP/1.3.2/services/NAGIOS/metainfo.xml      |  2 +
 .../HDP/1.3.2/services/OOZIE/metainfo.xml       |  3 +
 .../stacks/HDP/1.3.2/services/PIG/metainfo.xml  |  2 +
 .../HDP/1.3.2/services/SQOOP/metainfo.xml       |  2 +
 .../HDP/1.3.2/services/WEBHCAT/metainfo.xml     |  4 +-
 .../HDP/1.3.2/services/ZOOKEEPER/metainfo.xml   |  3 +
 .../HDP/2.0.6/services/FLUME/metainfo.xml       |  2 +
 .../HDP/2.0.6/services/GANGLIA/metainfo.xml     |  3 +
 .../HDP/2.0.6/services/HBASE/metainfo.xml       |  4 ++
 .../stacks/HDP/2.0.6/services/HDFS/metainfo.xml |  7 +++
 .../stacks/HDP/2.0.6/services/HIVE/metainfo.xml | 11 +++-
 .../HDP/2.0.6/services/NAGIOS/metainfo.xml      |  2 +
 .../HDP/2.0.6/services/OOZIE/metainfo.xml       |  3 +
 .../stacks/HDP/2.0.6/services/PIG/metainfo.xml  |  2 +
 .../HDP/2.0.6/services/SQOOP/metainfo.xml       |  2 +
 .../HDP/2.0.6/services/WEBHCAT/metainfo.xml     |  4 +-
 .../stacks/HDP/2.0.6/services/YARN/metainfo.xml |  7 +++
 .../HDP/2.0.6/services/ZOOKEEPER/metainfo.xml   |  2 +
 .../stacks/HDP/2.1/services/FALCON/metainfo.xml |  3 +
 .../stacks/HDP/2.1/services/STORM/metainfo.xml  |  6 ++
 .../stacks/HDP/2.1/services/TEZ/metainfo.xml    |  2 +
 .../stacks/HDP/2.1/services/YARN/metainfo.xml   |  2 +
 .../api/util/StackExtensionHelperTest.java      |  2 +
 .../HDP/2.0.6/services/NAGIOS/metainfo.xml      |  2 +
 .../HDP/2.0.6/services/WEBHCAT/metainfo.xml     |  1 +
 .../stacks/HDP/2.0.6/services/YARN/metainfo.xml |  4 ++
 .../HDP/2.0.7/services/HBASE/metainfo.xml       |  4 ++
 .../stacks/HDP/2.0.7/services/HDFS/metainfo.xml |  7 +++
 .../stacks/HDP/2.0.7/services/HIVE/metainfo.xml |  5 ++
 .../HDP/2.0.7/services/ZOOKEEPER/metainfo.xml   |  3 +
 .../HDP/2.0.8/services/SQOOP/metainfo.xml       |  1 +
 .../HDP/2.1.1/services/STORM/metainfo.xml       |  4 ++
 ambari-web/app/mappers/stack_service_mapper.js  |  2 +
 ambari-web/app/mixins.js                        |  1 -
 ambari-web/app/mixins/models/service_mixin.js   | 30 ----------
 ambari-web/app/models/service.js                |  7 ++-
 ambari-web/app/models/stack_service.js          |  5 +-
 .../app/models/stack_service_component.js       | 10 +---
 ambari-web/app/utils/helper.js                  | 15 ++++-
 ambari-web/test/app_test.js                     |  2 +-
 ambari-web/test/service_components.js           | 61 ++++++++++++++++++++
 54 files changed, 312 insertions(+), 87 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java
index c39b2ec..fb3ddc9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java
@@ -138,6 +138,10 @@ public class StackExtensionHelper {
     mergedServiceInfo.setName(childService.getName());
     mergedServiceInfo.setComment(childService.getComment());
     mergedServiceInfo.setVersion(childService.getVersion());
+    mergedServiceInfo.setDisplayName(
+        childService.getDisplayName() != null ?
+            childService.getDisplayName() :
+            parentService.getDisplayName());
     mergedServiceInfo.setConfigDependencies(
         childService.getConfigDependencies() != null ?
             childService.getConfigDependencies() :
@@ -306,10 +310,15 @@ public class StackExtensionHelper {
       result.setCommandScript(parent.getCommandScript());
     }
     //keep the same semantic as for ServiceInfo
+    result.setDisplayName(
+        child.getDisplayName() != null ?
+            child.getDisplayName() : parent.getDisplayName());
+
     result.setConfigDependencies(
         child.getConfigDependencies() != null ?
             child.getConfigDependencies() : parent.getConfigDependencies());
 
+
 //    HashSet downloadSource = child.getDownloadSource();
 //    if (downloadSource != null) {
 //      result.setDownloadSource(child.getDownloadSource());

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/java/org/apache/ambari/server/controller/StackServiceComponentResponse.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/StackServiceComponentResponse.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/StackServiceComponentResponse.java
index d76f350..47d153b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/StackServiceComponentResponse.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/StackServiceComponentResponse.java
@@ -18,14 +18,14 @@
 
 package org.apache.ambari.server.controller;
 
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
 import org.apache.ambari.server.state.AutoDeployInfo;
 import org.apache.ambari.server.state.ComponentInfo;
 import org.apache.ambari.server.state.CustomCommandDefinition;
 
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
 /**
  * Stack service component response.
  */
@@ -51,6 +51,11 @@ public class StackServiceComponentResponse {
   private String componentName;
 
   /**
+   * component display name
+   */
+  private String componentDisplayName;
+
+  /**
    * component category
    */
   private String componentCategory;
@@ -89,6 +94,7 @@ public class StackServiceComponentResponse {
    */
   public StackServiceComponentResponse(ComponentInfo component) {
     componentName = component.getName();
+    componentDisplayName = component.getDisplayName();
     componentCategory = component.getCategory();
     isClient = component.isClient();
     isMaster = component.isMaster();
@@ -180,6 +186,25 @@ public class StackServiceComponentResponse {
   }
 
   /**
+   * Get component display name.
+   *
+   * @return component display name
+   */
+
+  public String getComponentDisplayName() {
+    return componentDisplayName;
+  }
+
+  /**
+   * Set component display name.
+   *
+   * @param componentDisplayName  component display name
+   */
+  public void setComponentDisplayName(String componentDisplayName) {
+    this.componentDisplayName = componentDisplayName;
+  }
+
+  /**
    * Get component category.
    *
    * @return component category

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/java/org/apache/ambari/server/controller/StackServiceResponse.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/StackServiceResponse.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/StackServiceResponse.java
index 6e7e8e0..9c986b1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/StackServiceResponse.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/StackServiceResponse.java
@@ -18,19 +18,20 @@
 
 package org.apache.ambari.server.controller;
 
+import org.apache.ambari.server.state.CustomCommandDefinition;
+import org.apache.ambari.server.state.ServiceInfo;
+
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.ambari.server.state.CustomCommandDefinition;
-import org.apache.ambari.server.state.ServiceInfo;
-
 public class StackServiceResponse {
 
   private String stackName;
   private String stackVersion;
   private String serviceName;
+  private String serviceDisplayName;
   private String userName;
   private String comments;
   private String serviceVersion;
@@ -48,6 +49,7 @@ public class StackServiceResponse {
    */
   public StackServiceResponse(ServiceInfo service) {
     serviceName = service.getName();
+    serviceDisplayName = service.getDisplayName();
     userName = null;
     comments = service.getComment();
     serviceVersion = service.getVersion();
@@ -91,6 +93,14 @@ public class StackServiceResponse {
     this.serviceName = serviceName;
   }
 
+  public String getServiceDisplayName() {
+    return serviceDisplayName;
+  }
+
+  public void setServiceDisplayName(String serviceDisplayName) {
+    this.serviceDisplayName = serviceDisplayName;
+  }
+
   public String getUserName() {
     return userName;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackServiceComponentResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackServiceComponentResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackServiceComponentResourceProvider.java
index e7e50d0..1f23773 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackServiceComponentResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackServiceComponentResourceProvider.java
@@ -18,27 +18,17 @@
 
 package org.apache.ambari.server.controller.internal;
 
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.StackServiceComponentRequest;
 import org.apache.ambari.server.controller.StackServiceComponentResponse;
-import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
-import org.apache.ambari.server.controller.spi.NoSuchResourceException;
-import org.apache.ambari.server.controller.spi.Predicate;
-import org.apache.ambari.server.controller.spi.Request;
-import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.*;
 import org.apache.ambari.server.controller.spi.Resource.Type;
-import org.apache.ambari.server.controller.spi.SystemException;
-import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.state.AutoDeployInfo;
 
+import java.util.*;
+
 public class StackServiceComponentResourceProvider extends
     ReadOnlyResourceProvider {
 
@@ -54,6 +44,9 @@ public class StackServiceComponentResourceProvider extends
   private static final String COMPONENT_NAME_PROPERTY_ID = PropertyHelper.getPropertyId(
       "StackServiceComponents", "component_name");
 
+  private static final String COMPONENT_DISPLAY_NAME_PROPERTY_ID = PropertyHelper.getPropertyId(
+          "StackServiceComponents", "display_name");
+
   private static final String COMPONENT_CATEGORY_PROPERTY_ID = PropertyHelper.getPropertyId(
       "StackServiceComponents", "component_category");
 
@@ -127,6 +120,9 @@ public class StackServiceComponentResourceProvider extends
       setResourceProperty(resource, COMPONENT_NAME_PROPERTY_ID,
           response.getComponentName(), requestedIds);
 
+      setResourceProperty(resource, COMPONENT_DISPLAY_NAME_PROPERTY_ID,
+              response.getComponentDisplayName(), requestedIds);
+
       setResourceProperty(resource, COMPONENT_CATEGORY_PROPERTY_ID,
           response.getComponentCategory(), requestedIds);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackServiceResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackServiceResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackServiceResourceProvider.java
index f6a6141..1296ba1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackServiceResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackServiceResourceProvider.java
@@ -19,26 +19,16 @@
 
 package org.apache.ambari.server.controller.internal;
 
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.StackServiceRequest;
 import org.apache.ambari.server.controller.StackServiceResponse;
-import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
-import org.apache.ambari.server.controller.spi.NoSuchResourceException;
-import org.apache.ambari.server.controller.spi.Predicate;
-import org.apache.ambari.server.controller.spi.Request;
-import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.*;
 import org.apache.ambari.server.controller.spi.Resource.Type;
-import org.apache.ambari.server.controller.spi.SystemException;
-import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
 
+import java.util.*;
+
 public class StackServiceResourceProvider extends ReadOnlyResourceProvider {
 
   protected static final String SERVICE_NAME_PROPERTY_ID = PropertyHelper.getPropertyId(
@@ -50,6 +40,9 @@ public class StackServiceResourceProvider extends ReadOnlyResourceProvider {
   public static final String STACK_VERSION_PROPERTY_ID = PropertyHelper.getPropertyId(
       "StackServices", "stack_version");
 
+  private static final String SERVICE_DISPLAY_NAME_PROPERTY_ID = PropertyHelper.getPropertyId(
+      "StackServices", "display_name");
+
   private static final String USER_NAME_PROPERTY_ID = PropertyHelper.getPropertyId(
       "StackServices", "user_name");
 
@@ -119,6 +112,9 @@ public class StackServiceResourceProvider extends ReadOnlyResourceProvider {
       setResourceProperty(resource, SERVICE_NAME_PROPERTY_ID,
           response.getServiceName(), requestedIds);
 
+      setResourceProperty(resource, SERVICE_DISPLAY_NAME_PROPERTY_ID,
+              response.getServiceDisplayName(), requestedIds);
+
       setResourceProperty(resource, USER_NAME_PROPERTY_ID,
           response.getUserName(), requestedIds);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/java/org/apache/ambari/server/state/ComponentInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ComponentInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ComponentInfo.java
index 172b1ea..a23b3c3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ComponentInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ComponentInfo.java
@@ -18,18 +18,18 @@
 
 package org.apache.ambari.server.state;
 
-import java.util.ArrayList;
-import java.util.List;
-
 import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlElementWrapper;
 import javax.xml.bind.annotation.XmlElements;
+import java.util.ArrayList;
+import java.util.List;
 
 @XmlAccessorType(XmlAccessType.FIELD)
 public class ComponentInfo {
   private String name;
+  private String displayName;
   private String category;
   private boolean deleted;
   private String cardinality;
@@ -95,6 +95,14 @@ public class ComponentInfo {
     this.name = name;
   }
 
+  public String getDisplayName() {
+    return displayName;
+  }
+
+  public void setDisplayName(String displayName) {
+    this.displayName = displayName;
+  }
+
   public String getCategory() {
     return category;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
index fe2c5f3..de70620 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
@@ -52,6 +52,7 @@ public class ServiceInfo {
   private String schemaVersion;
 
   private String name;
+  private String displayName;
   private String version;
   private String comment;
   private List<PropertyInfo> properties;
@@ -163,6 +164,14 @@ public class ServiceInfo {
     this.name = name;
   }
 
+  public String getDisplayName() {
+    return displayName;
+  }
+
+  public void setDisplayName(String displayName) {
+    this.displayName = displayName;
+  }
+
   public String getVersion() {
     return version;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/properties.json b/ambari-server/src/main/resources/properties.json
index bc2ad22..5777935 100644
--- a/ambari-server/src/main/resources/properties.json
+++ b/ambari-server/src/main/resources/properties.json
@@ -226,6 +226,7 @@
         "StackServices/stack_name",
         "StackServices/stack_version",
         "StackServices/service_name",
+        "StackServices/display_name",
         "StackServices/user_name",
         "StackServices/comments",
         "StackServices/service_version",
@@ -251,6 +252,7 @@
         "StackServiceComponents/stack_version",
         "StackServiceComponents/service_name",
         "StackServiceComponents/component_name",
+        "StackServiceComponents/display_name",
         "StackServiceComponents/component_category",
         "StackServiceComponents/is_client",
         "StackServiceComponents/is_master",

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/GANGLIA/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/GANGLIA/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/GANGLIA/metainfo.xml
index dc3ac6a..2003145 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/GANGLIA/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/GANGLIA/metainfo.xml
@@ -20,11 +20,13 @@
   <services>
     <service>
       <name>GANGLIA</name>
+      <displayName>Ganglia</displayName>
       <comment>Ganglia Metrics Collection system (&lt;a href=&quot;http://oss.oetiker.ch/rrdtool/&quot; target=&quot;_blank&quot;&gt;RRDTool&lt;/a&gt; will be installed too)</comment>
       <version>3.5.0</version>
       <components>
         <component>
           <name>GANGLIA_SERVER</name>
+          <displayName>Ganglia Server</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <commandScript>
@@ -36,6 +38,7 @@
 
         <component>
           <name>GANGLIA_MONITOR</name>
+          <displayName>Ganglia Monitor</displayName>
           <category>SLAVE</category>
           <cardinality>ALL</cardinality>
           <auto-deploy>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/metainfo.xml
index fa53125..f209475 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/metainfo.xml
@@ -20,6 +20,7 @@
   <services>
     <service>
       <name>HBASE</name>
+      <displayName>HBase</displayName>
       <comment>Non-relational distributed database and centralized service for configuration management &amp;
         synchronization
       </comment>
@@ -27,6 +28,7 @@
       <components>
         <component>
           <name>HBASE_MASTER</name>
+          <displayName>HBase Master</displayName>
           <category>MASTER</category>
           <cardinality>1+</cardinality>
           <dependencies>
@@ -65,6 +67,7 @@
 
         <component>
           <name>HBASE_REGIONSERVER</name>
+          <displayName>RegionServer</displayName>
           <category>SLAVE</category>
           <cardinality>1+</cardinality>
           <commandScript>
@@ -85,6 +88,7 @@
 
         <component>
           <name>HBASE_CLIENT</name>
+          <displayName>HBase Client</displayName>
           <category>CLIENT</category>
           <cardinality>1+</cardinality>
           <commandScript>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/metainfo.xml
index c453905..7112c31 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/metainfo.xml
@@ -20,12 +20,14 @@
   <services>
     <service>
       <name>HDFS</name>
+      <displayName>HDFS</displayName>
       <comment>Apache Hadoop Distributed File System</comment>
       <version>1.2.0.1.3.3.0</version>
 
       <components>
         <component>
           <name>NAMENODE</name>
+          <displayName>NameNode</displayName>
           <category>MASTER</category>
           <cardinality>1-2</cardinality>
           <commandScript>
@@ -47,6 +49,7 @@
 
         <component>
           <name>DATANODE</name>
+          <displayName>DataNode</displayName>
           <category>SLAVE</category>
           <cardinality>1+</cardinality>
           <commandScript>
@@ -58,6 +61,7 @@
 
         <component>
           <name>SECONDARY_NAMENODE</name>
+          <displayName>SNameNode</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <commandScript>
@@ -69,6 +73,7 @@
 
         <component>
           <name>HDFS_CLIENT</name>
+          <displayName>HDFS Client</displayName>
           <category>CLIENT</category>
           <cardinality>1+</cardinality>
           <commandScript>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml
index d5dc506..2c58c44 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml
@@ -20,12 +20,14 @@
   <services>
     <service>
       <name>HIVE</name>
+      <displayName>Hive</displayName>
       <comment>Data warehouse system for ad-hoc queries &amp; analysis of large datasets and table &amp; storage management service</comment>
       <version>0.11.0.1.3.3.0</version>
       <components>
 
         <component>
           <name>HIVE_METASTORE</name>
+          <displayName>Hive Metastore</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <auto-deploy>
@@ -41,6 +43,7 @@
 
         <component>
           <name>HIVE_SERVER</name>
+          <displayName>HiveServer2</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <dependencies>
@@ -68,6 +71,7 @@
 
         <component>
           <name>MYSQL_SERVER</name>
+          <displayName>MySQL Server</displayName>
           <category>MASTER</category>
           <cardinality>0-1</cardinality>
           <commandScript>
@@ -78,6 +82,7 @@
 
         <component>
           <name>HIVE_CLIENT</name>
+          <displayName>Hive Client</displayName>
           <category>CLIENT</category>
           <cardinality>1+</cardinality>
           <commandScript>
@@ -163,11 +168,15 @@
 
     <service>
       <name>HCATALOG</name>
-      <comment>This is comment for HCATALOG service</comment>
+      <displayName>HCatalog</displayName>
+      <comment>A table and storage management layer for Hadoop that enables users with different data processing tools
+        to more easily read and write data on the grid.
+      </comment>
       <version>0.11.0.1.3.3.0</version>
       <components>
         <component>
           <name>HCAT</name>
+          <displayName>HCat</displayName>
           <category>CLIENT</category>
           <commandScript>
             <script>scripts/hcat_client.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/metainfo.xml
index 7ab788a..facdc07 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/metainfo.xml
@@ -21,11 +21,13 @@
   <services>
     <service>
       <name>MAPREDUCE</name>
+      <displayName>MapReduce</displayName>
       <comment>Apache Hadoop Distributed Processing Framework</comment>
       <version>1.2.0.1.3.3.0</version>
       <components>
         <component>
           <name>JOBTRACKER</name>
+          <displayName>JobTracker</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <commandScript>
@@ -47,6 +49,7 @@
 
         <component>
           <name>TASKTRACKER</name>
+          <displayName>TaskTracker</displayName>
           <category>SLAVE</category>
           <cardinality>1+</cardinality>
           <commandScript>
@@ -58,6 +61,7 @@
 
         <component>
           <name>MAPREDUCE_CLIENT</name>
+          <displayName>MapReduce Client</displayName>
           <category>CLIENT</category>
           <cardinality>1+</cardinality>
           <commandScript>
@@ -91,6 +95,7 @@
               
         <component>
           <name>HISTORYSERVER</name>
+          <displayName>History Server</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <auto-deploy>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/metainfo.xml
index 79fecbc..c926429 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/metainfo.xml
@@ -20,11 +20,13 @@
   <services>
     <service>
       <name>NAGIOS</name>
+      <displayName>Nagios</displayName>
       <comment>Nagios Monitoring and Alerting system</comment>
       <version>3.5.0</version>
       <components>
         <component>
           <name>NAGIOS_SERVER</name>
+          <displayName>Nagios Server</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <dependencies>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/metainfo.xml
index fb8397e..cc86c8e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/metainfo.xml
@@ -20,12 +20,14 @@
   <services>
     <service>
       <name>OOZIE</name>
+      <displayName>Oozie</displayName>
       <comment>System for workflow coordination and execution of Apache Hadoop jobs.  This also includes the installation of the optional Oozie Web Console which relies on and will install the &lt;a target="_blank" href="http://www.sencha.com/legal/open-source-faq/"&gt;ExtJS&lt;/a&gt; Library.
       </comment>
       <version>3.3.2.1.3.3.0</version>
       <components>
         <component>
           <name>OOZIE_SERVER</name>
+          <displayName>Oozie Server</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <dependencies>
@@ -53,6 +55,7 @@
 
         <component>
           <name>OOZIE_CLIENT</name>
+          <displayName>Oozie Client</displayName>
           <category>CLIENT</category>
           <cardinality>1+</cardinality>
           <dependencies>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/metainfo.xml
index bbc078f..8e87a50 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/metainfo.xml
@@ -20,11 +20,13 @@
   <services>
     <service>
       <name>PIG</name>
+      <displayName>Pig</displayName>
       <comment>Scripting platform for analyzing large datasets</comment>
       <version>0.11.1.1.3.3.0</version>
       <components>
         <component>
           <name>PIG</name>
+          <displayName>Pig</displayName>
           <category>CLIENT</category>
           <cardinality>1+</cardinality>
           <commandScript>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/metainfo.xml
index 1c25945..42e0bf8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/metainfo.xml
@@ -20,6 +20,7 @@
   <services>
     <service>
       <name>SQOOP</name>
+      <displayName>Sqoop</displayName>
       <comment>Tool for transferring bulk data between Apache Hadoop and
         structured data stores such as relational databases
       </comment>
@@ -28,6 +29,7 @@
       <components>
         <component>
           <name>SQOOP</name>
+          <displayName>Sqoop</displayName>
           <category>CLIENT</category>
           <cardinality>1+</cardinality>
           <dependencies>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/metainfo.xml
index 642f5d0..b115c28 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/metainfo.xml
@@ -20,11 +20,13 @@
   <services>
     <service>
       <name>WEBHCAT</name>
-      <comment>This is comment for WEBHCAT service</comment>
+      <displayName>WebHCat</displayName>
+      <comment>Provides a REST-like web API for HCatalog and related Hadoop components.</comment>
       <version>0.11.0.1.3.3.0</version>
       <components>
         <component>
           <name>WEBHCAT_SERVER</name>
+          <displayName>WebHCat Server</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <dependencies>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/metainfo.xml
index 3db40c1..4cdb1c8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/metainfo.xml
@@ -20,12 +20,14 @@
   <services>
     <service>
       <name>ZOOKEEPER</name>
+      <displayName>ZooKeeper</displayName>
       <comment>Centralized service which provides highly reliable distributed coordination</comment>
       <version>3.4.5.1.3.3.0</version>
       <components>
 
         <component>
           <name>ZOOKEEPER_SERVER</name>
+          <displayName>ZooKeeper Server</displayName>
           <category>MASTER</category>
           <cardinality>1+</cardinality>
           <commandScript>
@@ -37,6 +39,7 @@
 
         <component>
           <name>ZOOKEEPER_CLIENT</name>
+          <displayName>ZooKeeper Client</displayName>
           <category>CLIENT</category>
           <cardinality>1+</cardinality>
           <commandScript>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/metainfo.xml
index 5b73548..4f6bbd9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/metainfo.xml
@@ -20,11 +20,13 @@
   <services>
     <service>
       <name>FLUME</name>
+      <displayName>Flume</displayName>
       <comment>Data management and processing platform</comment>
       <version>1.4.0.2.0</version>
       <components>
         <component>
           <name>FLUME_HANDLER</name>
+          <displayName>Flume</displayName>
           <category>SLAVE</category>
           <cardinality>1+</cardinality>
           <commandScript>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/metainfo.xml
index cfbfeab..d481db3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/metainfo.xml
@@ -20,11 +20,13 @@
   <services>
     <service>
       <name>GANGLIA</name>
+      <displayName>Ganglia</displayName>
       <comment>Ganglia Metrics Collection system (&lt;a href=&quot;http://oss.oetiker.ch/rrdtool/&quot; target=&quot;_blank&quot;&gt;RRDTool&lt;/a&gt; will be installed too)</comment>
       <version>3.5.0</version>
       <components>
         <component>
           <name>GANGLIA_SERVER</name>
+          <displayName>Ganglia Server</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <commandScript>
@@ -36,6 +38,7 @@
 
         <component>
           <name>GANGLIA_MONITOR</name>
+          <displayName>Ganglia Monitor</displayName>
           <category>SLAVE</category>
           <cardinality>ALL</cardinality>
           <auto-deploy>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/metainfo.xml
index af6b100..67d4adf 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/metainfo.xml
@@ -20,6 +20,7 @@
   <services>
     <service>
       <name>HBASE</name>
+      <displayName>HBase</displayName>
       <comment>Non-relational distributed database and centralized service for configuration management &amp;
         synchronization
       </comment>
@@ -27,6 +28,7 @@
       <components>
         <component>
           <name>HBASE_MASTER</name>
+          <displayName>HBase Master</displayName>
           <category>MASTER</category>
           <cardinality>1+</cardinality>
           <dependencies>
@@ -65,6 +67,7 @@
 
         <component>
           <name>HBASE_REGIONSERVER</name>
+          <displayName>RegionServer</displayName>
           <category>SLAVE</category>
           <cardinality>1+</cardinality>
           <commandScript>
@@ -75,6 +78,7 @@
 
         <component>
           <name>HBASE_CLIENT</name>
+          <displayName>HBase Client</displayName>
           <category>CLIENT</category>
           <cardinality>1+</cardinality>
           <commandScript>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/metainfo.xml
index 53b9304..57308c0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/metainfo.xml
@@ -20,12 +20,14 @@
   <services>
     <service>
       <name>HDFS</name>
+      <displayName>HDFS</displayName>
       <comment>Apache Hadoop Distributed File System</comment>
       <version>2.1.0.2.0</version>
 
       <components>
         <component>
           <name>NAMENODE</name>
+          <displayName>NameNode</displayName>
           <category>MASTER</category>
           <cardinality>1-2</cardinality>
           <commandScript>
@@ -55,6 +57,7 @@
 
         <component>
           <name>DATANODE</name>
+          <displayName>DataNode</displayName>
           <category>SLAVE</category>
           <cardinality>1+</cardinality>
           <commandScript>
@@ -66,6 +69,7 @@
 
         <component>
           <name>SECONDARY_NAMENODE</name>
+          <displayName>SNameNode</displayName>
           <!-- TODO:  cardinality is conditional on HA usage -->
           <cardinality>1</cardinality>
           <category>MASTER</category>
@@ -78,6 +82,7 @@
 
         <component>
           <name>HDFS_CLIENT</name>
+          <displayName>HDFS Client</displayName>
           <category>CLIENT</category>
           <cardinality>1+</cardinality>
           <commandScript>
@@ -111,6 +116,7 @@
 
         <component>
           <name>JOURNALNODE</name>
+          <displayName>JournalNode</displayName>
           <category>SLAVE</category>
           <cardinality>0+</cardinality>
           <commandScript>
@@ -122,6 +128,7 @@
 
         <component>
           <name>ZKFC</name>
+          <displayName>ZKFailoverController</displayName>
           <category>SLAVE</category>
           <!-- TODO: cardinality is conditional on HA topology -->
           <cardinality>0+</cardinality>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/metainfo.xml
index a883e36..d31156e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/metainfo.xml
@@ -20,12 +20,14 @@
   <services>
     <service>
       <name>HIVE</name>
+      <displayName>Hive</displayName>
       <comment>Data warehouse system for ad-hoc queries &amp; analysis of large datasets and table &amp; storage management service</comment>
       <version>0.12.0.2.0</version>
       <components>
 
         <component>
           <name>HIVE_METASTORE</name>
+          <displayName>Hive Metastore</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <auto-deploy>
@@ -41,6 +43,7 @@
 
         <component>
           <name>HIVE_SERVER</name>
+          <displayName>HiveServer2</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <dependencies>
@@ -75,6 +78,7 @@
 
         <component>
           <name>MYSQL_SERVER</name>
+          <displayName>MySQL Server</displayName>
           <category>MASTER</category>
           <cardinality>0-1</cardinality>
           <commandScript>
@@ -85,6 +89,7 @@
 
         <component>
           <name>HIVE_CLIENT</name>
+          <displayName>Hive Client</displayName>
           <category>CLIENT</category>
           <cardinality>1+</cardinality>
           <commandScript>
@@ -176,11 +181,15 @@
 
     <service>
       <name>HCATALOG</name>
-      <comment>This is comment for HCATALOG service</comment>
+      <displayName>HCatalog</displayName>
+      <comment>A table and storage management layer for Hadoop that enables users with different data processing tools
+        to more easily read and write data on the grid.
+      </comment>
       <version>0.12.0.2.0.6.0</version>
       <components>
         <component>
           <name>HCAT</name>
+          <displayName>HCat</displayName>
           <category>CLIENT</category>
           <commandScript>
             <script>scripts/hcat_client.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/metainfo.xml
index 2a642b5..43724c0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/metainfo.xml
@@ -20,11 +20,13 @@
   <services>
     <service>
       <name>NAGIOS</name>
+      <displayName>Nagios</displayName>
       <comment>Nagios Monitoring and Alerting system</comment>
       <version>3.5.0</version>
       <components>
         <component>
            <name>NAGIOS_SERVER</name>
+          <displayName>Nagios Server</displayName>
            <category>MASTER</category>
            <cardinality>1</cardinality>
           <dependencies>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/metainfo.xml
index 093d5d3..444bbcb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/metainfo.xml
@@ -20,12 +20,14 @@
   <services>
     <service>
       <name>OOZIE</name>
+      <displayName>Oozie</displayName>
       <comment>System for workflow coordination and execution of Apache Hadoop jobs.  This also includes the installation of the optional Oozie Web Console which relies on and will install the &lt;a target="_blank" href="http://www.sencha.com/legal/open-source-faq/"&gt;ExtJS&lt;/a&gt; Library.
       </comment>
       <version>4.0.0.2.0</version>
       <components>
         <component>
           <name>OOZIE_SERVER</name>
+          <displayName>Oozie Server</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <dependencies>
@@ -60,6 +62,7 @@
 
         <component>
           <name>OOZIE_CLIENT</name>
+          <displayName>Oozie Client</displayName>
           <category>CLIENT</category>
           <cardinality>1+</cardinality>
           <dependencies>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/metainfo.xml
index 5d9dba5..a41d1f7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/metainfo.xml
@@ -20,11 +20,13 @@
   <services>
     <service>
       <name>PIG</name>
+      <displayName>Pig</displayName>
       <comment>Scripting platform for analyzing large datasets</comment>
       <version>0.12.0.2.0</version>
       <components>
         <component>
           <name>PIG</name>
+          <displayName>Pig</displayName>
           <category>CLIENT</category>
           <cardinality>0+</cardinality>
           <commandScript>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/metainfo.xml
index c8c01f1..1f4a90b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/metainfo.xml
@@ -20,6 +20,7 @@
   <services>
     <service>
       <name>SQOOP</name>
+      <displayName>Sqoop</displayName>
       <comment>Tool for transferring bulk data between Apache Hadoop and
         structured data stores such as relational databases
       </comment>
@@ -28,6 +29,7 @@
       <components>
         <component>
           <name>SQOOP</name>
+          <displayName>Sqoop</displayName>
           <category>CLIENT</category>
           <cardinality>1+</cardinality>
           <dependencies>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml
index 5e93cf4..5f493a6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml
@@ -20,11 +20,13 @@
   <services>
     <service>
       <name>WEBHCAT</name>
-      <comment>This is comment for WEBHCAT service</comment>
+      <displayName>WebHCat</displayName>
+      <comment>Provides a REST-like web API for HCatalog and related Hadoop components.</comment>
       <version>0.12.0.2.0</version>
       <components>
         <component>
           <name>WEBHCAT_SERVER</name>
+          <displayName>WebHCat Server</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <dependencies>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml
index e08221a..dd0a780 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml
@@ -21,12 +21,14 @@
   <services>
     <service>
       <name>YARN</name>
+      <displayName>YARN</displayName>
       <comment>Apache Hadoop NextGen MapReduce (YARN)</comment>
       <version>2.1.0.2.0</version>
       <components>
 
         <component>
           <name>RESOURCEMANAGER</name>
+          <displayName>ResourceManager</displayName>
           <category>MASTER</category>
           <cardinality>1-2</cardinality>
           <commandScript>
@@ -59,6 +61,7 @@
 
         <component>
           <name>NODEMANAGER</name>
+          <displayName>NodeManager</displayName>
           <category>SLAVE</category>
           <cardinality>1+</cardinality>
           <commandScript>
@@ -70,6 +73,7 @@
 
         <component>
           <name>YARN_CLIENT</name>
+          <displayName>Yarn Client</displayName>
           <category>CLIENT</category>
           <cardinality>1+</cardinality>
           <commandScript>
@@ -143,11 +147,13 @@
 
     <service>
       <name>MAPREDUCE2</name>
+      <displayName>MapReduce2</displayName>
       <comment>Apache Hadoop NextGen MapReduce (YARN)</comment>
       <version>2.1.0.2.0.6.0</version>
       <components>
         <component>
           <name>HISTORYSERVER</name>
+          <displayName>History Server</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <auto-deploy>
@@ -172,6 +178,7 @@
 
         <component>
           <name>MAPREDUCE2_CLIENT</name>
+          <displayName>MapReduce2 Client</displayName>
           <category>CLIENT</category>
           <cardinality>0+</cardinality>
           <commandScript>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/metainfo.xml
index a259351..66713a0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/metainfo.xml
@@ -20,12 +20,14 @@
   <services>
     <service>
       <name>ZOOKEEPER</name>
+      <displayName>ZooKeeper</displayName>
       <comment>Centralized service which provides highly reliable distributed coordination</comment>
       <version>3.4.5.2.0</version>
       <components>
 
         <component>
           <name>ZOOKEEPER_SERVER</name>
+          <displayName>ZooKeeper Server</displayName>
           <category>MASTER</category>
           <cardinality>1+</cardinality>
           <commandScript>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/metainfo.xml
index c16893e..e8af69d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/metainfo.xml
@@ -20,11 +20,13 @@
   <services>
     <service>
       <name>FALCON</name>
+      <displayName>Falcon</displayName>
       <comment>Data management and processing platform</comment>
       <version>0.5.0.2.1</version>
       <components>
         <component>
           <name>FALCON_CLIENT</name>
+          <displayName>Falcon Client</displayName>
           <category>CLIENT</category>
           <cardinality>1+</cardinality>
           <commandScript>
@@ -42,6 +44,7 @@
         </component>
         <component>
           <name>FALCON_SERVER</name>
+          <displayName>Falcon Server</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <dependencies>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/metainfo.xml
index 1f15f8f..f2c391c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/metainfo.xml
@@ -21,12 +21,14 @@
   <services>
     <service>
       <name>STORM</name>
+      <displayName>Storm</displayName>
       <comment>Apache Hadoop Stream processing framework</comment>
       <version>0.9.1.2.1</version>
       <components>
 
         <component>
           <name>NIMBUS</name>
+          <displayName>Nimbus</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <dependencies>
@@ -47,6 +49,7 @@
 
         <component>
           <name>STORM_REST_API</name>
+          <displayName>Storm REST API Server</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <commandScript>
@@ -58,6 +61,7 @@
 
         <component>
           <name>SUPERVISOR</name>
+          <displayName>Supervisor</displayName>
           <category>SLAVE</category>
           <cardinality>1+</cardinality>
           <commandScript>
@@ -69,6 +73,7 @@
 
         <component>
           <name>STORM_UI_SERVER</name>
+          <displayName>Storm UI Server</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <commandScript>
@@ -80,6 +85,7 @@
 
         <component>
           <name>DRPC_SERVER</name>
+          <displayName>DRPC Server</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <commandScript>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/2.1/services/TEZ/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/TEZ/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1/services/TEZ/metainfo.xml
index 177d538..641de86 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/TEZ/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/TEZ/metainfo.xml
@@ -20,11 +20,13 @@
   <services>
     <service>
       <name>TEZ</name>
+      <displayName>Tez</displayName>
       <comment>Tez is the next generation Hadoop Query Processing framework written on top of YARN.</comment>
       <version>0.4.0.2.1</version>
       <components>
         <component>
           <name>TEZ_CLIENT</name>
+          <displayName>Tez Client</displayName>
           <cardinality>1+</cardinality>
           <category>CLIENT</category>
           <commandScript>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/main/resources/stacks/HDP/2.1/services/YARN/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/YARN/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1/services/YARN/metainfo.xml
index 424f363..0bbf30b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/YARN/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/YARN/metainfo.xml
@@ -21,12 +21,14 @@
   <services>
     <service>
       <name>YARN</name>
+      <displayName>YARN</displayName>
       <comment>Apache Hadoop NextGen MapReduce (YARN)</comment>
       <version>2.4.0.2.1</version>
       <components>
 
         <component>
           <name>APP_TIMELINE_SERVER</name>
+          <displayName>App Timeline Server</displayName>
           <category>MASTER</category>
           <cardinality>0-1</cardinality>
           <commandScript>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
index 2c5aa0a..3a62147 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
@@ -81,12 +81,14 @@ public class StackExtensionHelperTest {
       if (serviceInfo.getName().equals("HIVE")) {
         // Check old-style service
         assertEquals("HIVE", serviceInfo.getName());
+        assertEquals("Hive", serviceInfo.getDisplayName());
         assertEquals("2.0", serviceInfo.getSchemaVersion());
         assertTrue(serviceInfo.getComment().startsWith("Data warehouse system"));
         assertEquals("0.11.0.2.0.5.0", serviceInfo.getVersion());
         // Check some component definitions
         List<ComponentInfo> components = serviceInfo.getComponents();
         assertEquals("HIVE_METASTORE", components.get(0).getName());
+        assertEquals("Hive Metastore", components.get(0).getDisplayName());
         assertEquals("MASTER", components.get(0).getCategory());
         List<PropertyInfo> properties = serviceInfo.getProperties();
         // Check some property

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/test/resources/stacks/HDP/2.0.6/services/NAGIOS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.6/services/NAGIOS/metainfo.xml b/ambari-server/src/test/resources/stacks/HDP/2.0.6/services/NAGIOS/metainfo.xml
index 3751280..958eb14 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.6/services/NAGIOS/metainfo.xml
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.6/services/NAGIOS/metainfo.xml
@@ -20,11 +20,13 @@
   <services>
     <service>
       <name>NAGIOS</name>
+      <displayName>Nagios</displayName>
       <comment>Nagios Monitoring and Alerting system</comment>
       <version>3.5.0</version>
       <components>
         <component>
            <name>NAGIOS_SERVER</name>
+           <displayName>Nagios Server</displayName>
            <category>MASTER</category>
            <cardinality>1</cardinality>
           <dependencies>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/test/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml b/ambari-server/src/test/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml
index 13a4d5b..d73e67e 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml
@@ -20,6 +20,7 @@
   <services>
     <service>
       <name>WEBHCAT</name>
+      <displayName>WebHCat</displayName>
       <comment>This is comment for WEBHCAT service</comment>
       <version>0.11.0.2.0.5.0</version>
     </service>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/test/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml b/ambari-server/src/test/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml
index 78e31cc..01e93c2 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml
@@ -21,12 +21,14 @@
   <services>
     <service>
       <name>YARN</name>
+      <displayName>YARN</displayName>
       <comment>Apache Hadoop NextGen MapReduce (YARN)</comment>
       <version>2.1.0.2.0.6.0</version>
       <components>
 
         <component>
           <name>RESOURCEMANAGER</name>
+          <displayName>ResourceManager</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <commandScript>
@@ -59,6 +61,7 @@
 
         <component>
           <name>NODEMANAGER</name>
+          <displayName>NodeManager</displayName>
           <category>SLAVE</category>
           <cardinality>1+</cardinality>
           <commandScript>
@@ -69,6 +72,7 @@
         </component>
         <component>
           <name>YARN_CLIENT</name>
+          <displayName>Yarn Client</displayName>
           <category>CLIENT</category>
           <deleted>true</deleted>
           <cardinality>0+</cardinality>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/metainfo.xml b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/metainfo.xml
index e4a76c5..e3ebea1 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/metainfo.xml
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/metainfo.xml
@@ -23,6 +23,7 @@
   <services>
     <service>
       <name>HBASE</name>
+      <displayName>HBase</displayName>
       <comment>Non-relational distributed database and centralized service for configuration management &amp;
         synchronization
       </comment>
@@ -30,6 +31,7 @@
       <components>
         <component>
           <name>HBASE_MASTER</name>
+          <displayName>HBase Master</displayName>
           <category>MASTER</category>
           <commandScript> <!--This is the script to handle all default commands -->
             <script>scripts/hbase_master.py</script>
@@ -58,6 +60,7 @@
 
         <component>
           <name>HBASE_REGIONSERVER</name>
+          <displayName>RegionServer</displayName>
           <category>SLAVE</category>
           <commandScript> <!--This is the script to handle all default commands -->
             <script>scripts/hbase_master.py</script>
@@ -67,6 +70,7 @@
 
         <component>
           <name>HBASE_CLIENT</name>
+          <displayName>HBase Client</displayName>
           <category>CLIENT</category>
         </component>
       </components>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/metainfo.xml b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/metainfo.xml
index 74f47e7..69f13e9 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/metainfo.xml
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/metainfo.xml
@@ -20,12 +20,14 @@
   <services>
     <service>
       <name>HDFS</name>
+      <displayName>HDFS</displayName>
       <comment>Apache Hadoop Distributed File System</comment>
       <version>2.1.0.2.0.6.0</version>
 
       <components>
         <component>
           <name>NAMENODE</name>
+          <displayName>NameNode</displayName>
           <category>MASTER</category>
           <commandScript>
             <script>scripts/namenode.py</script>
@@ -54,6 +56,7 @@
 
         <component>
           <name>DATANODE</name>
+          <displayName>DataNode</displayName>
           <category>SLAVE</category>
           <commandScript>
             <script>scripts/datanode.py</script>
@@ -64,6 +67,7 @@
 
         <component>
           <name>SECONDARY_NAMENODE</name>
+          <displayName>SNameNode</displayName>
           <category>MASTER</category>
           <commandScript>
             <script>scripts/snamenode.py</script>
@@ -74,6 +78,7 @@
 
         <component>
           <name>HDFS_CLIENT</name>
+          <displayName>HDFS Client</displayName>
           <category>CLIENT</category>
           <commandScript>
             <script>scripts/hdfs_client.py</script>
@@ -84,6 +89,7 @@
 
         <component>
           <name>JOURNALNODE</name>
+          <displayName>JournalNode</displayName>
           <category>MASTER</category>
           <commandScript>
             <script>scripts/journalnode.py</script>
@@ -94,6 +100,7 @@
 
         <component>
           <name>ZKFC</name>
+          <displayName>ZKFailoverController</displayName>
           <category>SLAVE</category>
           <commandScript>
             <script>scripts/zkfc_slave.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HIVE/metainfo.xml b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HIVE/metainfo.xml
index d6fe959..7be9ccd 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HIVE/metainfo.xml
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HIVE/metainfo.xml
@@ -21,6 +21,7 @@
     <service>
 
       <name>HIVE</name>
+      <displayName>Hive</displayName>
       <comment>Data warehouse system for ad-hoc queries &amp; analysis of large datasets and table &amp; storage management service</comment>
       <version>0.11.0.2.0.5.0</version>
 
@@ -28,6 +29,7 @@
 
         <component>
           <name>HIVE_METASTORE</name>
+          <displayName>Hive Metastore</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <auto-deploy>
@@ -43,6 +45,7 @@
 
         <component>
           <name>HIVE_SERVER</name>
+          <displayName>HiveServer2</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <dependencies>
@@ -84,6 +87,7 @@
 
         <component>
           <name>MYSQL_SERVER</name>
+          <displayName>MySQL Server</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <commandScript>
@@ -94,6 +98,7 @@
 
         <component>
           <name>HIVE_CLIENT</name>
+          <displayName>Hive Client</displayName>
           <category>CLIENT</category>
           <cardinality>0+</cardinality>
           <commandScript>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/ZOOKEEPER/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/ZOOKEEPER/metainfo.xml b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/ZOOKEEPER/metainfo.xml
index fc09417..d397d19 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/ZOOKEEPER/metainfo.xml
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/ZOOKEEPER/metainfo.xml
@@ -20,12 +20,14 @@
   <services>
     <service>
       <name>ZOOKEEPER</name>
+      <displayName>ZooKeeper</displayName>
       <comment>Centralized service which provides highly reliable distributed coordination</comment>
       <version>3.4.5.2.0</version>
       <components>
 
         <component>
           <name>ZOOKEEPER_SERVER</name>
+          <displayName>ZooKeeper Server</displayName>
           <category>MASTER</category>
           <cardinality>1</cardinality>
           <commandScript>
@@ -37,6 +39,7 @@
 
         <component>
           <name>ZOOKEEPER_CLIENT</name>
+          <displayName>ZooKeeper Client</displayName>
           <category>CLIENT</category>
           <cardinality>0+</cardinality>
           <commandScript>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/test/resources/stacks/HDP/2.0.8/services/SQOOP/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.8/services/SQOOP/metainfo.xml b/ambari-server/src/test/resources/stacks/HDP/2.0.8/services/SQOOP/metainfo.xml
index 44d4232..15d9d75 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.8/services/SQOOP/metainfo.xml
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.8/services/SQOOP/metainfo.xml
@@ -20,6 +20,7 @@
   <services>
     <service>
       <name>SQOOP</name>
+      <displayName>Sqoop</displayName>
       <comment>Tool for transferring bulk data between Apache Hadoop and structured data stores such as relational databases</comment>
       <version>1.23</version>
       <components>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-server/src/test/resources/stacks/HDP/2.1.1/services/STORM/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.1.1/services/STORM/metainfo.xml b/ambari-server/src/test/resources/stacks/HDP/2.1.1/services/STORM/metainfo.xml
index 2552cbe..79a3130 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.1.1/services/STORM/metainfo.xml
+++ b/ambari-server/src/test/resources/stacks/HDP/2.1.1/services/STORM/metainfo.xml
@@ -37,6 +37,7 @@
 
         <component>
           <name>STORM_REST_API</name>
+          <displayName>Storm REST API Server</displayName>
           <category>MASTER</category>
           <commandScript>
             <script>scripts/rest_api.py</script>
@@ -47,6 +48,7 @@
 
         <component>
           <name>SUPERVISOR</name>
+          <displayName>Supervisor</displayName>
           <category>SLAVE</category>
           <commandScript>
             <script>scripts/supervisor.py</script>
@@ -57,6 +59,7 @@
 
         <component>
           <name>STORM_UI_SERVER</name>
+          <displayName>Storm UI Server</displayName>
           <category>MASTER</category>
           <commandScript>
             <script>scripts/ui_server.py</script>
@@ -67,6 +70,7 @@
 
         <component>
           <name>DRPC_SERVER</name>
+          <displayName>DRPC Server</displayName>
           <category>MASTER</category>
           <commandScript>
             <script>scripts/drpc_server.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-web/app/mappers/stack_service_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/stack_service_mapper.js b/ambari-web/app/mappers/stack_service_mapper.js
index 11d066e..7609d9d 100644
--- a/ambari-web/app/mappers/stack_service_mapper.js
+++ b/ambari-web/app/mappers/stack_service_mapper.js
@@ -24,6 +24,7 @@ App.stackServiceMapper = App.QuickDataMapper.create({
   config: {
     id: 'service_name',
     service_name: 'service_name',
+    display_name: 'display_name',
     config_types: 'config_types',
     comments: 'comments',
     service_version: 'service_version',
@@ -42,6 +43,7 @@ App.stackServiceMapper = App.QuickDataMapper.create({
   component_config: {
     id: 'component_name',
     component_name: 'component_name',
+    display_name: 'display_name',
     cardinality: 'cardinality',
     custom_commands: 'custom_commands',
     service_name: 'service_name',
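
For reference, a minimal sketch of the stack service payload this mapper consumes, using the MAPREDUCE/HISTORYSERVER entries from the test fixture in ambari-web/test/service_components.js; the surrounding envelope (the serviceComponents wrapper) follows the API path quoted in stack_service.js and is abbreviated here:

    {
      "StackServices" : {
        "service_name" : "MAPREDUCE",
        "display_name" : "MapReduce"
      },
      "serviceComponents" : [ {
        "StackServiceComponents" : {
          "component_name" : "HISTORYSERVER",
          "display_name" : "History Server"
        }
      } ]
    }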

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-web/app/mixins.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins.js b/ambari-web/app/mixins.js
index 020b891..e68fcc4 100644
--- a/ambari-web/app/mixins.js
+++ b/ambari-web/app/mixins.js
@@ -23,7 +23,6 @@ require('mixins/common/blueprint');
 require('mixins/common/localStorage');
 require('mixins/common/userPref');
 require('mixins/common/serverValidator');
-require('mixins/models/service_mixin');
 require('mixins/common/tableServerProvider');
 require('mixins/common/table_server_mixin');
 require('mixins/main/host/details/host_components/decommissionable');

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-web/app/mixins/models/service_mixin.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/models/service_mixin.js b/ambari-web/app/mixins/models/service_mixin.js
deleted file mode 100644
index 4c6b069..0000000
--- a/ambari-web/app/mixins/models/service_mixin.js
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-var App = require('app');
-
-/**
- * Service Mixin that used for App.StackService and App.Service models.
- *
- **/
-App.ServiceModelMixin = Em.Mixin.create({
-  serviceName: DS.attr('string'),
-  displayName: function() {
-    return App.format.role(this.get('serviceName'));
-  }.property('serviceName')
-});

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-web/app/models/service.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/service.js b/ambari-web/app/models/service.js
index b275a9b..03d2a2e 100644
--- a/ambari-web/app/models/service.js
+++ b/ambari-web/app/models/service.js
@@ -19,9 +19,12 @@
 
 var App = require('app');
 require('utils/config');
-require('mixins/models/service_mixin');
 
-App.Service = DS.Model.extend(App.ServiceModelMixin, {
+App.Service = DS.Model.extend({
+  serviceName: DS.attr('string'),
+  displayName: function() {
+    return App.format.role(this.get('serviceName'));
+  }.property('serviceName'),
   passiveState: DS.attr('string'),
   workStatus: DS.attr('string'),
   rand: DS.attr('string'),

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-web/app/models/stack_service.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/stack_service.js b/ambari-web/app/models/stack_service.js
index e19e27f..2baf65a 100644
--- a/ambari-web/app/models/stack_service.js
+++ b/ambari-web/app/models/stack_service.js
@@ -18,7 +18,6 @@
 
 var App = require('app');
 require('utils/helper');
-require('mixins/models/service_mixin');
 require('models/service_config');
 //TODO after moving validation/recommendation to BE belove requirements must be deleted
 require('utils/configs/defaults_providers/yarn_defaults_provider');
@@ -37,7 +36,9 @@ require('utils/configs/validators/storm_configs_validator');
  * The model maps to the  http://hostname:8080/api/v1/stacks2/HDP/versions/${versionNumber}/stackServices?fields=StackServices/*,serviceComponents/*
  * @type {*}
  */
-App.StackService = DS.Model.extend(App.ServiceModelMixin, {
+App.StackService = DS.Model.extend({
+  serviceName: DS.attr('string'),
+  displayName: DS.attr('string'),
   comments: DS.attr('string'),
   configTypes: DS.attr('object'),
   serviceVersion: DS.attr('string'),

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-web/app/models/stack_service_component.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/stack_service_component.js b/ambari-web/app/models/stack_service_component.js
index 3ed366c..8c5594d 100644
--- a/ambari-web/app/models/stack_service_component.js
+++ b/ambari-web/app/models/stack_service_component.js
@@ -24,6 +24,7 @@ var numberUtils = require('utils/number_utils');
  */
 App.StackServiceComponent = DS.Model.extend({
   componentName: DS.attr('string'),
+  displayName: DS.attr('string'),
   cardinality: DS.attr('string'),
   customCommands: DS.attr('array'),
   dependencies: DS.attr('array'),
@@ -54,15 +55,6 @@ App.StackServiceComponent = DS.Model.extend({
     return numberUtils.getCardinalityValue(this.get('cardinality'), true);
   }.property('cardinality'),
 
-  /** @property {String} displayName**/
-  displayName: function() {
-    if (App.format.role(this.get('componentName'))) {
-      return App.format.role(this.get('componentName'));
-    } else {
-      return this.get('componentName');
-    }
-  }.property('componentName'),
-
   /** @property {Boolean} isRequired - component required to install **/
   isRequired: function() {
     return this.get('minToInstall') > 0;
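
Taken together with the service.js and stack_service.js hunks above, the refactor moves display names out of client-side string munging: App.Service keeps its computed property, while the stack models now read the value straight from the stack definition. A side-by-side sketch, copied from the hunks in this patch:

    // App.Service (running services): still derived on the client
    displayName: function() {
      return App.format.role(this.get('serviceName'));
    }.property('serviceName')

    // App.StackService / App.StackServiceComponent: plain attribute,
    // populated by App.stackServiceMapper from the API's "display_name"
    displayName: DS.attr('string')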

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-web/app/utils/helper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/helper.js b/ambari-web/app/utils/helper.js
index c22759d..3be9849 100644
--- a/ambari-web/app/utils/helper.js
+++ b/ambari-web/app/utils/helper.js
@@ -373,7 +373,18 @@ App.format = {
    * return {string}
    */
   role:function (role) {
-    return this.normalizeName(role);
+    var result;
+    var models = [App.StackService, App.StackServiceComponent];
+    models.forEach(function(model){
+      var instance =  model.find().findProperty('id',role);
+      if (instance) {
+        result = instance.get('displayName');
+      }
+    },this);
+    if (!result)  {
+      result =  this.normalizeName(role);
+    }
+    return result;
   },
 
   /**
@@ -397,7 +408,7 @@ App.format = {
       suffixRegExp.lastIndex = 0;
       var matches = suffixRegExp.exec(name);
       name = matches[1].capitalize() + matches[2].capitalize();
-    };
+    }
     return name.capitalize();
   },
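
In effect, App.format.role() now prefers the display name declared in the stack definition and only falls back to normalizeName() when no matching App.StackService or App.StackServiceComponent record is loaded. A small usage sketch (the second id is hypothetical):

    App.format.role('HIVE_SERVER');    // -> "HiveServer2", from the HIVE_SERVER component's displayName
    App.format.role('MY_CUSTOM_ROLE'); // -> no stack model match, falls back to normalizeName()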
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-web/test/app_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/app_test.js b/ambari-web/test/app_test.js
index 85b50ee..5cf78cd 100644
--- a/ambari-web/test/app_test.js
+++ b/ambari-web/test/app_test.js
@@ -458,7 +458,7 @@ describe('App', function () {
     afterEach(function () {
       i++;
       App.StackServiceComponent.find.restore();
-    })
+    });
 
     testCases.forEach(function (test) {
       it(test.key + ' should contain ' + test.result, function () {


http://git-wip-us.apache.org/repos/asf/ambari/blob/02ee3d44/ambari-web/test/service_components.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/service_components.js b/ambari-web/test/service_components.js
index fe7feca..8b89ab3 100644
--- a/ambari-web/test/service_components.js
+++ b/ambari-web/test/service_components.js
@@ -22,6 +22,7 @@ module.exports = {
       "StackServices" : {
         "comments" : "Apache Hadoop Distributed Processing Framework",
         "service_name" : "MAPREDUCE",
+        "display_name" : "MapReduce",
         "service_version" : "1.2.0.1.3.3.0",
         "stack_name" : "HDP",
         "stack_version" : "1.3.2"
@@ -31,6 +32,7 @@ module.exports = {
           "StackServiceComponents" : {
             "component_category" : "MASTER",
             "component_name" : "HISTORYSERVER",
+            "display_name" : "History Server",
             "is_client" : false,
             "is_master" : true,
             "service_name" : "MAPREDUCE",
@@ -43,6 +45,7 @@ module.exports = {
           "StackServiceComponents" : {
             "component_category" : "MASTER",
             "component_name" : "JOBTRACKER",
+            "display_name" : "JobTracker",
             "is_client" : false,
             "is_master" : true,
             "service_name" : "MAPREDUCE",
@@ -55,6 +58,7 @@ module.exports = {
           "StackServiceComponents" : {
             "component_category" : "CLIENT",
             "component_name" : "MAPREDUCE_CLIENT",
+            "display_name" : "MapReduce Client",
             "is_client" : true,
             "is_master" : false,
             "service_name" : "MAPREDUCE",
@@ -67,6 +71,7 @@ module.exports = {
           "StackServiceComponents" : {
             "component_category" : "SLAVE",
             "component_name" : "TASKTRACKER",
+            "display_name" : "TaskTracker",
             "is_client" : false,
             "is_master" : false,
             "service_name" : "MAPREDUCE",
@@ -84,6 +89,7 @@ module.exports = {
         "custom_commands" : [ ],
         "service_check_supported" : true,
         "service_name" : "FALCON",
+        "display_name" : "Falcon",
         "service_version" : "0.5.0.2.1",
         "stack_name" : "HDP",
         "stack_version" : "2.1",
@@ -118,6 +124,7 @@ module.exports = {
             "cardinality" : "1+",
             "component_category" : "CLIENT",
             "component_name" : "FALCON_CLIENT",
+            "display_name" : "Falcon Client",
             "custom_commands" : [ ],
             "is_client" : true,
             "is_master" : false,
@@ -133,6 +140,7 @@ module.exports = {
             "cardinality" : "1",
             "component_category" : "MASTER",
             "component_name" : "FALCON_SERVER",
+            "display_name" : "Falcon Server",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : true,
@@ -172,6 +180,7 @@ module.exports = {
         "custom_commands" : [ ],
         "service_check_supported" : true,
         "service_name" : "FLUME",
+        "display_name" : "Flume",
         "service_version" : "1.4.0.2.1.1.0",
         "stack_name" : "HDP",
         "stack_version" : "2.1",
@@ -201,6 +210,7 @@ module.exports = {
             "cardinality" : "0+",
             "component_category" : "SLAVE",
             "component_name" : "FLUME_HANDLER",
+            "display_name" : "Flume",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : false,
@@ -219,6 +229,7 @@ module.exports = {
         "custom_commands" : [ ],
         "service_check_supported" : false,
         "service_name" : "GANGLIA",
+        "display_name" : "Ganglia",
         "service_version" : "3.5.0",
         "stack_name" : "HDP",
         "stack_version" : "2.1",
@@ -238,6 +249,7 @@ module.exports = {
             "cardinality" : "ALL",
             "component_category" : "SLAVE",
             "component_name" : "GANGLIA_MONITOR",
+            "display_name" : "Ganglia Monitor",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : false,
@@ -256,6 +268,7 @@ module.exports = {
             "cardinality" : "1",
             "component_category" : "MASTER",
             "component_name" : "GANGLIA_SERVER",
+            "display_name" : "Ganglia Server",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : true,
@@ -274,6 +287,7 @@ module.exports = {
         "custom_commands" : [ ],
         "service_check_supported" : true,
         "service_name" : "HBASE",
+        "display_name" : "HBase",
         "service_version" : "0.98.0.2.1",
         "stack_name" : "HDP",
         "stack_version" : "2.1",
@@ -308,6 +322,7 @@ module.exports = {
             "cardinality" : "1+",
             "component_category" : "CLIENT",
             "component_name" : "HBASE_CLIENT",
+            "display_name" : "HBase Client",
             "custom_commands" : [ ],
             "is_client" : true,
             "is_master" : false,
@@ -323,6 +338,7 @@ module.exports = {
             "cardinality" : "1+",
             "component_category" : "MASTER",
             "component_name" : "HBASE_MASTER",
+            "display_name" : "HBase Master",
             "custom_commands" : [
               "DECOMMISSION"
             ],
@@ -361,6 +377,7 @@ module.exports = {
             "cardinality" : "1+",
             "component_category" : "SLAVE",
             "component_name" : "HBASE_REGIONSERVER",
+            "display_name" : "RegionServer",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : false,
@@ -379,6 +396,7 @@ module.exports = {
         "custom_commands" : [ ],
         "service_check_supported" : true,
         "service_name" : "HCATALOG",
+        "display_name" : "HCatalog",
         "service_version" : "0.12.0.2.1",
         "stack_name" : "HDP",
         "stack_version" : "2.1",
@@ -403,6 +421,7 @@ module.exports = {
             "cardinality" : null,
             "component_category" : "CLIENT",
             "component_name" : "HCAT",
+            "display_name" : "HCat",
             "custom_commands" : [ ],
             "is_client" : true,
             "is_master" : false,
@@ -421,6 +440,7 @@ module.exports = {
         "custom_commands" : [ ],
         "service_check_supported" : true,
         "service_name" : "HDFS",
+        "display_name" : "HDFS",
         "service_version" : "2.4.0.2.1",
         "stack_name" : "HDP",
         "stack_version" : "2.1",
@@ -460,6 +480,7 @@ module.exports = {
             "cardinality" : "1+",
             "component_category" : "SLAVE",
             "component_name" : "DATANODE",
+            "display_name" : "DataNode",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : false,
@@ -475,6 +496,7 @@ module.exports = {
             "cardinality" : "1+",
             "component_category" : "CLIENT",
             "component_name" : "HDFS_CLIENT",
+            "display_name" : "HDFS Client",
             "custom_commands" : [ ],
             "is_client" : true,
             "is_master" : false,
@@ -490,6 +512,7 @@ module.exports = {
             "cardinality" : "0+",
             "component_category" : "SLAVE",
             "component_name" : "JOURNALNODE",
+            "display_name" : "JournalNode",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : false,
@@ -505,6 +528,7 @@ module.exports = {
             "cardinality" : "1-2",
             "component_category" : "MASTER",
             "component_name" : "NAMENODE",
+            "display_name" : "NameNode",
             "custom_commands" : [
               "DECOMMISSION"
             ],
@@ -522,6 +546,7 @@ module.exports = {
             "cardinality" : "1",
             "component_category" : "MASTER",
             "component_name" : "SECONDARY_NAMENODE",
+            "display_name" : "SNameNode",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : true,
@@ -537,6 +562,7 @@ module.exports = {
             "cardinality" : "0+",
             "component_category" : "SLAVE",
             "component_name" : "ZKFC",
+            "display_name" : "ZKFailoverController",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : false,
@@ -555,6 +581,7 @@ module.exports = {
         "custom_commands" : [ ],
         "service_check_supported" : true,
         "service_name" : "HIVE",
+        "display_name" : "Hive",
         "service_version" : "0.13.0.2.1",
         "stack_name" : "HDP",
         "stack_version" : "2.1",
@@ -594,6 +621,7 @@ module.exports = {
             "cardinality" : "1+",
             "component_category" : "CLIENT",
             "component_name" : "HIVE_CLIENT",
+            "display_name" : "Hive Client",
             "custom_commands" : [ ],
             "is_client" : true,
             "is_master" : false,
@@ -609,6 +637,7 @@ module.exports = {
             "cardinality" : "1",
             "component_category" : "MASTER",
             "component_name" : "HIVE_METASTORE",
+            "display_name" : "Hive Metastore",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : true,
@@ -628,6 +657,7 @@ module.exports = {
             "cardinality" : "1",
             "component_category" : "MASTER",
             "component_name" : "HIVE_SERVER",
+            "display_name" : "HiveServer2",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : true,
@@ -684,6 +714,7 @@ module.exports = {
             "cardinality" : "0-1",
             "component_category" : "MASTER",
             "component_name" : "MYSQL_SERVER",
+            "display_name" : "MySQL Server",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : true,
@@ -702,6 +733,7 @@ module.exports = {
         "custom_commands" : [ ],
         "service_check_supported" : true,
         "service_name" : "MAPREDUCE2",
+        "display_name" : "MapReduce2",
         "service_version" : "2.1.0.2.0.6.0",
         "stack_name" : "HDP",
         "stack_version" : "2.1",
@@ -736,6 +768,7 @@ module.exports = {
             "cardinality" : "1",
             "component_category" : "MASTER",
             "component_name" : "HISTORYSERVER",
+            "display_name" : "History Server",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : true,
@@ -766,6 +799,7 @@ module.exports = {
             "cardinality" : "0+",
             "component_category" : "CLIENT",
             "component_name" : "MAPREDUCE2_CLIENT",
+            "display_name" : "MapReduce2 Client",
             "custom_commands" : [ ],
             "is_client" : true,
             "is_master" : false,
@@ -784,6 +818,7 @@ module.exports = {
         "custom_commands" : [ ],
         "service_check_supported" : false,
         "service_name" : "NAGIOS",
+        "display_name" : "Nagios",
         "service_version" : "3.5.0",
         "stack_name" : "HDP",
         "stack_version" : "2.1",
@@ -803,6 +838,7 @@ module.exports = {
             "cardinality" : "1",
             "component_category" : "MASTER",
             "component_name" : "NAGIOS_SERVER",
+            "display_name" : "Nagios Server",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : true,
@@ -882,6 +918,7 @@ module.exports = {
         "custom_commands" : [ ],
         "service_check_supported" : true,
         "service_name" : "OOZIE",
+        "display_name" : "Oozie",
         "service_version" : "4.0.0.2.1",
         "stack_name" : "HDP",
         "stack_version" : "2.1",
@@ -916,6 +953,7 @@ module.exports = {
             "cardinality" : "1+",
             "component_category" : "CLIENT",
             "component_name" : "OOZIE_CLIENT",
+            "display_name" : "Oozie Client",
             "custom_commands" : [ ],
             "is_client" : true,
             "is_master" : false,
@@ -952,6 +990,7 @@ module.exports = {
             "cardinality" : "1",
             "component_category" : "MASTER",
             "component_name" : "OOZIE_SERVER",
+            "display_name" : "Oozie Server",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : true,
@@ -1001,6 +1040,7 @@ module.exports = {
         "custom_commands" : [ ],
         "service_check_supported" : true,
         "service_name" : "PIG",
+        "display_name" : "Pig",
         "service_version" : "0.12.1.2.1",
         "stack_name" : "HDP",
         "stack_version" : "2.1",
@@ -1030,6 +1070,7 @@ module.exports = {
             "cardinality" : "0+",
             "component_category" : "CLIENT",
             "component_name" : "PIG",
+            "display_name" : "Pig",
             "custom_commands" : [ ],
             "is_client" : true,
             "is_master" : false,
@@ -1048,6 +1089,7 @@ module.exports = {
         "custom_commands" : [ ],
         "service_check_supported" : true,
         "service_name" : "SQOOP",
+        "display_name" : "Sqoop",
         "service_version" : "1.4.4.2.1",
         "stack_name" : "HDP",
         "stack_version" : "2.1",
@@ -1067,6 +1109,7 @@ module.exports = {
             "cardinality" : "1+",
             "component_category" : "CLIENT",
             "component_name" : "SQOOP",
+            "display_name" : "Sqoop",
             "custom_commands" : [ ],
             "is_client" : true,
             "is_master" : false,
@@ -1106,6 +1149,7 @@ module.exports = {
         "custom_commands" : [ ],
         "service_check_supported" : true,
         "service_name" : "STORM",
+        "display_name" : "Storm",
         "service_version" : "0.9.1.2.1",
         "stack_name" : "HDP",
         "stack_version" : "2.1",
@@ -1130,6 +1174,7 @@ module.exports = {
             "cardinality" : "1",
             "component_category" : "MASTER",
             "component_name" : "DRPC_SERVER",
+            "display_name" : "DRPC Server",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : true,
@@ -1145,6 +1190,7 @@ module.exports = {
             "cardinality" : "1",
             "component_category" : "MASTER",
             "component_name" : "NIMBUS",
+            "display_name" : "Nimbus",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : true,
@@ -1171,6 +1217,7 @@ module.exports = {
             "cardinality" : "1",
             "component_category" : "MASTER",
             "component_name" : "STORM_REST_API",
+            "display_name" : "Storm REST API Server",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : true,
@@ -1186,6 +1233,7 @@ module.exports = {
             "cardinality" : "1",
             "component_category" : "MASTER",
             "component_name" : "STORM_UI_SERVER",
+            "display_name" : "Storm UI Server",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : true,
@@ -1201,6 +1249,7 @@ module.exports = {
             "cardinality" : "1+",
             "component_category" : "SLAVE",
             "component_name" : "SUPERVISOR",
+            "display_name" : "Supervisor",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : false,
@@ -1219,6 +1268,7 @@ module.exports = {
         "custom_commands" : [ ],
         "service_check_supported" : false,
         "service_name" : "TEZ",
+        "display_name" : "Tez",
         "service_version" : "0.4.0.2.1",
         "stack_name" : "HDP",
         "stack_version" : "2.1",
@@ -1243,6 +1293,7 @@ module.exports = {
             "cardinality" : "1+",
             "component_category" : "CLIENT",
             "component_name" : "TEZ_CLIENT",
+            "display_name" : "Tez Client",
             "custom_commands" : [ ],
             "is_client" : true,
             "is_master" : false,
@@ -1261,6 +1312,7 @@ module.exports = {
         "custom_commands" : [ ],
         "service_check_supported" : true,
         "service_name" : "WEBHCAT",
+        "display_name" : "WebHCat",
         "service_version" : "0.13.0.2.1",
         "stack_name" : "HDP",
         "stack_version" : "2.1",
@@ -1285,6 +1337,7 @@ module.exports = {
             "cardinality" : "1",
             "component_category" : "MASTER",
             "component_name" : "WEBHCAT_SERVER",
+            "display_name" : "WebHCat Server",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : true,
@@ -1354,6 +1407,7 @@ module.exports = {
         "custom_commands" : [ ],
         "service_check_supported" : true,
         "service_name" : "YARN",
+        "display_name" : "YARN",
         "service_version" : "2.4.0.2.1",
         "stack_name" : "HDP",
         "stack_version" : "2.1",
@@ -1393,6 +1447,7 @@ module.exports = {
             "cardinality" : "0-1",
             "component_category" : "MASTER",
             "component_name" : "APP_TIMELINE_SERVER",
+            "display_name" : "App Timeline Server",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : true,
@@ -1408,6 +1463,7 @@ module.exports = {
             "cardinality" : "1+",
             "component_category" : "SLAVE",
             "component_name" : "NODEMANAGER",
+            "display_name" : "NodeManager",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : false,
@@ -1423,6 +1479,7 @@ module.exports = {
             "cardinality" : "1-2",
             "component_category" : "MASTER",
             "component_name" : "RESOURCEMANAGER",
+            "display_name" : "ResourceManager",
             "custom_commands" : [
               "DECOMMISSION",
               "REFRESHQUEUES"
@@ -1441,6 +1498,7 @@ module.exports = {
             "cardinality" : "1+",
             "component_category" : "CLIENT",
             "component_name" : "YARN_CLIENT",
+            "display_name" : "YARN Client",
             "custom_commands" : [ ],
             "is_client" : true,
             "is_master" : false,
@@ -1459,6 +1517,7 @@ module.exports = {
         "custom_commands" : [ ],
         "service_check_supported" : true,
         "service_name" : "ZOOKEEPER",
+        "display_name" : "ZooKeeper",
         "service_version" : "3.4.5.2.1",
         "stack_name" : "HDP",
         "stack_version" : "2.1",
@@ -1488,6 +1547,7 @@ module.exports = {
             "cardinality" : "1+",
             "component_category" : "CLIENT",
             "component_name" : "ZOOKEEPER_CLIENT",
+            "display_name" : "ZooKeeper Client",
             "custom_commands" : [ ],
             "is_client" : true,
             "is_master" : false,
@@ -1503,6 +1563,7 @@ module.exports = {
             "cardinality" : "1+",
             "component_category" : "MASTER",
             "component_name" : "ZOOKEEPER_SERVER",
+            "display_name" : "ZooKeeper Server",
             "custom_commands" : [ ],
             "is_client" : false,
             "is_master" : true,


[04/35] git commit: AMBARI-7000. Config History: Service Config page UI tweaks.(xiwang)

Posted by jo...@apache.org.
AMBARI-7000. Config History: Service Config page UI tweaks.(xiwang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/46a91dbb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/46a91dbb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/46a91dbb

Branch: refs/heads/branch-alerts-dev
Commit: 46a91dbb0bfacd5cda1f8a0713061b798eac5722
Parents: 0c1bce0
Author: Xi Wang <xi...@apache.org>
Authored: Fri Aug 22 15:38:46 2014 -0700
Committer: Xi Wang <xi...@apache.org>
Committed: Mon Aug 25 15:47:43 2014 -0700

----------------------------------------------------------------------
 .../data/configurations/service_version.json    |  7 +-
 .../data/configurations/service_versions.json   | 77 +++++++++++---------
 .../main/dashboard/config_history_controller.js |  6 +-
 .../controllers/main/service/info/configs.js    |  4 +-
 .../mappers/service_config_version_mapper.js    |  3 +-
 ambari-web/app/messages.js                      |  1 +
 ambari-web/app/models/service_config_version.js | 12 ++-
 ambari-web/app/styles/application.less          | 58 +++++++++++++--
 .../common/configs/config_history_flow.hbs      | 40 ++++++----
 .../templates/main/dashboard/config_history.hbs |  6 +-
 ambari-web/app/utils/ajax/ajax.js               |  2 +-
 .../views/common/configs/config_history_flow.js | 37 +++++++++-
 12 files changed, 182 insertions(+), 71 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/46a91dbb/ambari-web/app/assets/data/configurations/service_version.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/configurations/service_version.json b/ambari-web/app/assets/data/configurations/service_version.json
index dc02514..bf48cb7 100644
--- a/ambari-web/app/assets/data/configurations/service_version.json
+++ b/ambari-web/app/assets/data/configurations/service_version.json
@@ -2,11 +2,12 @@
   "items": [
     {
       "serviceconfigversion": "1",
-      "servicename": "HDFS",
       "createtime": "43800000000",
-      "appliedtime": "58600000000",
       "author": "admin",
-      "notes": "Notes should be here",
+      "group_id" : null,
+      "group_name" : null,
+      "service_config_version_note" : null,
+      "service_name" : "HDFS",
       "configurations": [
         {
           "type": "hdfs-site",

http://git-wip-us.apache.org/repos/asf/ambari/blob/46a91dbb/ambari-web/app/assets/data/configurations/service_versions.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/configurations/service_versions.json b/ambari-web/app/assets/data/configurations/service_versions.json
index 9c1a8a4..ddd59f0 100644
--- a/ambari-web/app/assets/data/configurations/service_versions.json
+++ b/ambari-web/app/assets/data/configurations/service_versions.json
@@ -2,11 +2,12 @@
   "items": [
     {
       "serviceconfigversion": "1",
-      "servicename": "HDFS",
       "createtime": "43800000000",
-      "appliedtime": "58600000000",
       "author": "admin",
-      "notes": "Notes should be here",
+      "group_id" : null,
+      "group_name" : null,
+      "service_config_version_note" : "Notes should be here",
+      "service_name" : "HDFS",
       "configurations": [
         {
           "type": "core-site",
@@ -21,11 +22,12 @@
     },
     {
       "serviceconfigversion": "1",
-      "servicename": "YARN",
       "createtime": "43300000000",
-      "appliedtime": "58600000000",
       "author": "admin",
-      "notes": "Notes should be here",
+      "group_id" : null,
+      "group_name" : null,
+      "service_config_version_note" : "Notes should be here",
+      "service_name" : "YARN",
       "configurations": [
         {
           "type": "core-site",
@@ -40,11 +42,12 @@
     },
     {
       "serviceconfigversion": "2",
-      "servicename": "HDFS",
       "createtime": "43500000000",
-      "appliedtime": "58600000000",
       "author": "admin",
-      "notes": "Notes should be here",
+      "group_id" : null,
+      "group_name" : null,
+      "service_config_version_note" : "Notes should be here",
+      "service_name" : "HDFS",
       "configurations": [
         {
           "type": "core-site",
@@ -59,11 +62,12 @@
     },
     {
       "serviceconfigversion": "2",
-      "servicename": "YARN",
       "createtime": "13800000000",
-      "appliedtime": "58600000000",
       "author": "admin",
-      "notes": "Notes should be here",
+      "group_id" : null,
+      "group_name" : null,
+      "service_config_version_note" : "Notes should be here",
+      "service_name" : "YARN",
       "configurations": [
         {
           "type": "core-site",
@@ -78,11 +82,12 @@
     },
     {
       "serviceconfigversion": "3",
-      "servicename": "HDFS",
       "createtime": "23800000000",
-      "appliedtime": "58600000000",
       "author": "admin",
-      "notes": "Notes should be here",
+      "group_id" : null,
+      "group_name" : null,
+      "service_config_version_note" : "Notes should be here",
+      "service_name" : "HDFS",
       "configurations": [
         {
           "type": "core-site",
@@ -97,11 +102,12 @@
     },
     {
       "serviceconfigversion": "3",
-      "servicename": "YARN",
       "createtime": "47800000000",
-      "appliedtime": "58600000000",
       "author": "admin",
-      "notes": "Notes should be here",
+      "group_id" : null,
+      "group_name" : null,
+      "service_config_version_note" : "Notes should be here",
+      "service_name" : "YARN",
       "configurations": [
         {
           "type": "core-site",
@@ -116,11 +122,12 @@
     },
     {
       "serviceconfigversion": "4",
-      "servicename": "HDFS",
       "createtime": "43900000000",
-      "appliedtime": "58600000000",
       "author": "admin",
-      "notes": "Notes should be here",
+      "group_id" : null,
+      "group_name" : null,
+      "service_config_version_note" : "Notes should be here",
+      "service_name" : "HDFS",
       "configurations": [
         {
           "type": "core-site",
@@ -135,11 +142,12 @@
     },
     {
       "serviceconfigversion": "4",
-      "servicename": "YARN",
       "createtime": "33800000000",
-      "appliedtime": "58600000000",
       "author": "admin",
-      "notes": "Notes should be here",
+      "group_id" : null,
+      "group_name" : null,
+      "service_config_version_note" : "Notes should be here",
+      "service_name" : "YARN",
       "configurations": [
         {
           "type": "core-site",
@@ -154,11 +162,12 @@
     },
     {
       "serviceconfigversion": "5",
-      "servicename": "HDFS",
       "createtime": "41800000000",
-      "appliedtime": "58600000000",
       "author": "admin",
-      "notes": "Notes should be here",
+      "group_id" : null,
+      "group_name" : null,
+      "service_config_version_note" : "Notes should be here",
+      "service_name" : "HDFS",
       "configurations": [
         {
           "type": "core-site",
@@ -173,11 +182,12 @@
     },
     {
       "serviceconfigversion": "5",
-      "servicename": "YARN",
       "createtime": "46800000000",
-      "appliedtime": "58600000000",
       "author": "admin",
-      "notes": "Notes should be here",
+      "group_id" : null,
+      "group_name" : null,
+      "service_config_version_note" : "Notes should be here",
+      "service_name" : "YARN",
       "configurations": [
         {
           "type": "core-site",
@@ -192,11 +202,12 @@
     },
     {
       "serviceconfigversion": "6",
-      "servicename": "YARN",
       "createtime": "44800000000",
-      "appliedtime": "58600000000",
       "author": "admin",
-      "notes": "Notes should be here",
+      "group_id" : null,
+      "group_name" : null,
+      "service_config_version_note" : "Notes should be here",
+      "service_name" : "YARN",
       "configurations": [
         {
           "type": "core-site",

http://git-wip-us.apache.org/repos/asf/ambari/blob/46a91dbb/ambari-web/app/controllers/main/dashboard/config_history_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/dashboard/config_history_controller.js b/ambari-web/app/controllers/main/dashboard/config_history_controller.js
index 574a5ae..2a81c28 100644
--- a/ambari-web/app/controllers/main/dashboard/config_history_controller.js
+++ b/ambari-web/app/controllers/main/dashboard/config_history_controller.js
@@ -31,7 +31,7 @@ App.MainConfigHistoryController = Em.ArrayController.extend(App.TableServerMixin
   filteredCount: 0,
   mockUrl: '/data/configurations/service_versions.json',
   realUrl: function () {
-    return App.apiPrefix + '/clusters/' + App.get('clusterName') + '/configurations/serviceconfigversions?<parameters>fields=serviceconfigversion,user,appliedtime,createtime,service_name,service_config_version_note&minimal_response=true';
+    return App.apiPrefix + '/clusters/' + App.get('clusterName') + '/configurations/serviceconfigversions?<parameters>fields=serviceconfigversion,user,group_id,group_name,createtime,service_name,service_config_version_note&minimal_response=true';
   }.property('App.clusterName'),
 
   /**
@@ -41,7 +41,7 @@ App.MainConfigHistoryController = Em.ArrayController.extend(App.TableServerMixin
   colPropAssoc: function () {
     var associations = [];
     associations[1] = 'serviceVersion';
-    associations[2] = 'configGroup';
+    associations[2] = 'configGroupName';
     associations[3] = 'createTime';
     associations[4] = 'author';
     associations[5] = 'briefNotes';
@@ -56,7 +56,7 @@ App.MainConfigHistoryController = Em.ArrayController.extend(App.TableServerMixin
     },
     {
       name: 'configGroup',
-      key: 'group_name',////
+      key: 'group_name',
       type: 'EQUAL'
     },
     {

http://git-wip-us.apache.org/repos/asf/ambari/blob/46a91dbb/ambari-web/app/controllers/main/service/info/configs.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/info/configs.js b/ambari-web/app/controllers/main/service/info/configs.js
index c10a4fa..dce4c35 100644
--- a/ambari-web/app/controllers/main/service/info/configs.js
+++ b/ambari-web/app/controllers/main/service/info/configs.js
@@ -79,8 +79,8 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
   }.property('App.isHadoop2Stack'),
 
   showConfigHistoryFeature: function() {
-    return (App.supports.configHistory && this.get('selectedConfigGroup.isDefault'));
-  }.property('selectedConfigGroup.isDefault'),
+    return App.supports.configHistory;
+  }.property('App.supports.configHistory'),
   /**
    * Map, which contains relation between group and site
    * to upload overridden properties

http://git-wip-us.apache.org/repos/asf/ambari/blob/46a91dbb/ambari-web/app/mappers/service_config_version_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/service_config_version_mapper.js b/ambari-web/app/mappers/service_config_version_mapper.js
index fcdb145..25d5aba 100644
--- a/ambari-web/app/mappers/service_config_version_mapper.js
+++ b/ambari-web/app/mappers/service_config_version_mapper.js
@@ -25,7 +25,8 @@ App.serviceConfigVersionsMapper = App.QuickDataMapper.create({
     service_id: 'service_name',
     version: "serviceconfigversion",
     create_time: 'createtime',
-    applied_time: 'appliedtime',
+    group_id: 'group_id',
+    group_name: 'group_name',
     author: 'user',
     notes: 'service_config_version_note',
     is_current: 'is_current',

http://git-wip-us.apache.org/repos/asf/ambari/blob/46a91dbb/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index f569411..a02b729 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -88,6 +88,7 @@ Em.I18n.translations = {
   'common.service': 'Service',
   'common.version':'Version',
   'common.description':'Description',
+  'common.default':'Default',
   'common.client':'Client',
   'common.zookeeper':'ZooKeeper',
   'common.hbase':'HBase',

http://git-wip-us.apache.org/repos/asf/ambari/blob/46a91dbb/ambari-web/app/models/service_config_version.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/service_config_version.js b/ambari-web/app/models/service_config_version.js
index 3074c0e..807d76f 100644
--- a/ambari-web/app/models/service_config_version.js
+++ b/ambari-web/app/models/service_config_version.js
@@ -23,7 +23,8 @@ var dateUtil = require('utils/date');
 
 App.ServiceConfigVersion = DS.Model.extend({
   serviceName: DS.attr('string'),
-  configGroup: DS.attr('string'),
+  groupName: DS.attr('string'),
+  groupId: DS.attr('string'),
   version: DS.attr('number'),
   createTime: DS.attr('number'),
   author: DS.attr('string'),
@@ -32,10 +33,13 @@ App.ServiceConfigVersion = DS.Model.extend({
   index: DS.attr('number'),
   isCurrent: DS.attr('boolean'),
   currentTooltip: function () {
-    return Em.I18n.t('dashboard.configHistory.table.current.tooltip').format(this.get('serviceName'), this.get('configGroup') || '');
-  }.property('serviceName', 'configGroup'),
+    return Em.I18n.t('dashboard.configHistory.table.current.tooltip').format(this.get('serviceName'), this.get('configGroupName'));
+  }.property('serviceName', 'configGroupName'),
+  configGroupName: function () {
+    return this.get('groupName') || (this.get('serviceName') + ' ' + Em.I18n.t('common.default'));
+  }.property('groupName'),
   briefNotes: function () {
-    return (typeof this.get('notes') === 'string') ? this.get('notes').slice(0, 100) : " ";
+    return (typeof this.get('notes') === 'string') ? this.get('notes').slice(0, 100) : "";
   }.property('notes'),
   versionText: function () {
     return Em.I18n.t('dashboard.configHistory.table.version.versionText').format(this.get('version'));
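
For readers skimming the model change above: the new configGroupName property falls back to a label derived from the service name whenever a version belongs to the default group (groupName is null), using the new 'common.default' message. A minimal sketch of that fallback follows; the sample records are hypothetical and the Ember computed-property plumbing is omitted, so this is illustrative only, not code from the commit.

  // Minimal sketch of the configGroupName fallback added above (illustrative only).
  // A version saved against the default config group carries groupName === null,
  // so its label becomes "<serviceName> Default".
  function configGroupLabel(version) {
    return version.groupName || (version.serviceName + ' Default');
  }

  // Hypothetical records, not taken from the mock JSON fixtures in this commit:
  configGroupLabel({ serviceName: 'HDFS', groupName: null });        // "HDFS Default"
  configGroupLabel({ serviceName: 'YARN', groupName: 'nm-small' });  // "nm-small"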

http://git-wip-us.apache.org/repos/asf/ambari/blob/46a91dbb/ambari-web/app/styles/application.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/application.less b/ambari-web/app/styles/application.less
index 3a90d7d..dd0adb9 100644
--- a/ambari-web/app/styles/application.less
+++ b/ambari-web/app/styles/application.less
@@ -1106,6 +1106,10 @@ h1 {
       .icon-lock {
         color: #008000;
       }
+      a.btn[disabled],
+      a.btn[disabled] [class^="icon-"], a [class*=" icon-"] {
+        cursor: not-allowed;
+      }
       .action{
         margin-left: 3px;
         margin-right: 1px;
@@ -1211,6 +1215,7 @@ h1 {
       -webkit-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05);
       -moz-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05);
       box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15), 0 1px 2px rgba(0, 0, 0, 0.05);
+      cursor: not-allowed;
     }
   }
   .capacity-scheduler {
@@ -4957,11 +4962,15 @@ ul.inline li {
     .flow-element {
       width: 18%;
       height: 100%;
-      .box {
+      .version-box {
         position: relative;
+        height: 90%;
+      }
+      .version-box .box {
+        //position: relative;
         cursor: pointer;
         width: 72%;
-        height: 90%;
+        height: 100%;
         background-color: #ffffff;
         border: 1px solid #dddddd;
         font-size: @default-font-size;
@@ -4975,14 +4984,49 @@ ul.inline li {
           color: #555555;
         }
       }
-      .box.displayed {
+      .version-box .version-popover {
+        display: none;
+        position: absolute;
+        bottom: 89px;
+        left: -45px;
+        z-index: 1000;
+        float: left;
+        min-width: 290px;
+        padding: 8px;
+        list-style: none;
+        background-color: #ffffff;
+        border: 1px solid #c3c3c3;
+        -webkit-box-shadow: 0 0 10px rgba(0, 0, 0, 0.2);
+        -moz-box-shadow: 0 0 10px rgba(0, 0, 0, 0.2);
+        box-shadow: 0 0 10px rgba(0, 0, 0, 0.2);
+        -webkit-background-clip: padding-box;
+        -moz-background-clip: padding;
+        background-clip: padding-box;
+        font-size: 13px;
+        .content {
+          padding: 1px 5px 15px 5px;
+          text-align: left;
+          .notes{
+            word-wrap: break-word;
+          }
+        }
+        .btn {
+          font-size: 13px;
+        }
+      }
+      .version-box:hover{
+        .version-popover {
+          display: block;
+        }
+      }
+      .version-box .box.displayed {
         border: 1px solid #444444;
         .content {
           color: #444444;
           font-weight: bold;
         }
       }
-      .box.grayedOut {
+      .version-box .box.grayedOut {
         background-color: #eeeeee;
         border: 1px solid #eeeeee;
         .content {
@@ -5006,7 +5050,7 @@ ul.inline li {
       .arrow-box {
         display: none;
       }
-      .box {
+      .version-box .box {
         width: 100%;
       }
       width: 13%;
@@ -5072,6 +5116,7 @@ ul.inline li {
     }
     .dropdown-submenu .dropdown-menu {
       min-width: 200px;
+      max-width: 300px;
       line-height: 20px;
       margin: 0px;
       padding: 5px;
@@ -5079,6 +5124,9 @@ ul.inline li {
       cursor: default;
       .content {
         padding: 1px 5px 15px 5px;
+        .notes{
+          word-wrap: break-word;
+        }
       }
       .btn {
         font-size: 13px;

http://git-wip-us.apache.org/repos/asf/ambari/blob/46a91dbb/ambari-web/app/templates/common/configs/config_history_flow.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/configs/config_history_flow.hbs b/ambari-web/app/templates/common/configs/config_history_flow.hbs
index 0722f1e..9aade90 100644
--- a/ambari-web/app/templates/common/configs/config_history_flow.hbs
+++ b/ambari-web/app/templates/common/configs/config_history_flow.hbs
@@ -23,18 +23,32 @@
     {{#each serviceVersion in view.visibleServiceVersion}}
       <div {{bindAttr class=":flow-element :pull-left serviceVersion.first:first"}}>
         <div class="arrow-box pull-left"><i class="icon-arrow-right icon-3x"></i></div>
-        <div {{bindAttr class=":box :pull-right serviceVersion.isDisplayed:displayed"}} {{action switchVersion serviceVersion target="view"}}>
-          <div class="top-label">
-            <span class="label label-info">{{serviceVersion.versionText}}</span>
-            {{#if serviceVersion.isCurrent}}
-              <span class="label label-success">
+        <div class="version-box">
+          <div {{bindAttr class=":box :pull-right serviceVersion.isDisplayed:displayed serviceVersion.isDisabled:grayedOut"}} {{action switchVersion serviceVersion target="view"}}>
+            <div class="top-label">
+              <span class="label label-info">{{serviceVersion.versionText}}</span>
+              {{#if serviceVersion.isCurrent}}
+                <span class="label label-success">
                 {{t common.current}}
                 <i {{bindAttr class=":icon-refresh :restart-required-service view.serviceVersion.isRestartRequired::hidden"}}></i>
               </span>
-            {{/if}}
+              {{/if}}
+            </div>
+              <div class="content">{{serviceVersion.author}}</div>
+              <div class="content">{{serviceVersion.shortModifiedDate}}</div>
+          </div>
+
+          <div class="version-popover">
+            <div class="content"> <strong>{{serviceVersion.serviceName}}</strong> <span class="label label-info">{{serviceVersion.versionText}}</span> &nbsp;
+              <strong>{{t services.service.config.configHistory.configGroup}}:{{serviceVersion.configGroupName}}</strong>
+              <div class="notes">{{serviceVersion.briefNotes}}</div>
+            </div>
+            <div>
+              <button class="btn" {{action switchVersion serviceVersion target="view"}}><i class="icon-search"></i>&nbsp;{{t common.view}}</button>
+              <button class="btn" {{bindAttr disabled="serviceVersion.isDisabled"}} {{action compare serviceVersion target="view"}}><i class="icon-copy"></i>&nbsp;{{t common.compare}}</button>
+              <button class="btn" {{bindAttr disabled="serviceVersion.isDisabled"}} {{action revert serviceVersion target="view"}}>{{t dashboard.configHistory.info-bar.revert.button}}</button>
+            </div>
           </div>
-          <div class="content">{{serviceVersion.author}}</div>
-          <div class="content">{{serviceVersion.shortModifiedDate}}</div>
         </div>
       </div>
     {{/each}}
@@ -58,13 +72,13 @@
               </div>
               <ul class="dropdown-menu">
                 <div class="content"> <strong>{{serviceVersion.serviceName}}</strong> <span class="label label-info">{{serviceVersion.versionText}}</span> &nbsp;
-                    <strong>{{t services.service.config.configHistory.configGroup}}:{{serviceVersion.configGroup}}</strong>
-                    <div>{{serviceVersion.briefNotes}}</div>
+                    <strong>{{t services.service.config.configHistory.configGroup}}:{{serviceVersion.configGroupName}}</strong>
+                    <div class="notes">{{serviceVersion.briefNotes}}</div>
                 </div>
                 <div>
-                  <button class="btn" {{bindAttr disabled="view.disableView"}} {{action switchVersion serviceVersion target="view"}}><i class="icon-search"></i>&nbsp;{{t common.view}}</button>
-                  <button class="btn" {{bindAttr disabled="view.disableCompare"}} {{action compare serviceVersion target="view"}}><i class="icon-copy"></i>&nbsp;{{t common.compare}}</button>
-                  <button class="btn" {{bindAttr disabled="view.disableMakeCurrent"}} {{action revert serviceVersion target="view"}}>{{t dashboard.configHistory.info-bar.revert.button}}</button>
+                  <button class="btn" {{action switchVersion serviceVersion target="view"}}><i class="icon-search"></i>&nbsp;{{t common.view}}</button>
+                  <button class="btn" {{bindAttr disabled="serviceVersion.isDisabled"}} {{action compare serviceVersion target="view"}}><i class="icon-copy"></i>&nbsp;{{t common.compare}}</button>
+                  <button class="btn" {{bindAttr disabled="serviceVersion.isDisabled"}} {{action revert serviceVersion target="view"}}>{{t dashboard.configHistory.info-bar.revert.button}}</button>
                 </div>
               </ul>
             </li>

http://git-wip-us.apache.org/repos/asf/ambari/blob/46a91dbb/ambari-web/app/templates/main/dashboard/config_history.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/dashboard/config_history.hbs b/ambari-web/app/templates/main/dashboard/config_history.hbs
index d25b3a0..de8680a 100644
--- a/ambari-web/app/templates/main/dashboard/config_history.hbs
+++ b/ambari-web/app/templates/main/dashboard/config_history.hbs
@@ -47,7 +47,7 @@
               </a>
               <i {{bindAttr class=":icon-refresh :restart-required-service item.isRestartRequired::hidden"}}></i>
             </td>
-            <td>{{item.configGroup}}
+            <td>{{item.configGroupName}}
               {{#if item.isCurrent}}
                 <span class="label label-success" rel="currentTooltip"
                 {{bindAttr data-original-title="item.currentTooltip"}}>{{t common.current}}
@@ -61,13 +61,13 @@
         {{/each}}
       {{else}}
         <tr>
-          <td class="first" colspan="6">
+          <td class="first" colspan="5">
             {{t dashboard.configHistory.table.empty}}
           </td>
         </tr>
       {{/if}}
     {{else}}
-      <tr><td colspan="6"><div class="spinner"></div></td></tr>
+      <tr><td colspan="5"><div class="spinner"></div></td></tr>
     {{/if}}
     </tbody>
 </table>

http://git-wip-us.apache.org/repos/asf/ambari/blob/46a91dbb/ambari-web/app/utils/ajax/ajax.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/ajax/ajax.js b/ambari-web/app/utils/ajax/ajax.js
index 987696c..3140595 100644
--- a/ambari-web/app/utils/ajax/ajax.js
+++ b/ambari-web/app/utils/ajax/ajax.js
@@ -1894,7 +1894,7 @@ var urls = {
     }
   },
   'service.serviceConfigVersions.get': {
-    real: '/clusters/{clusterName}/configurations/serviceconfigversions?service_name={serviceName}&fields=serviceconfigversion,user,appliedtime,createtime,service_name,service_config_version_note&minimal_response=true',
+    real: '/clusters/{clusterName}/configurations/serviceconfigversions?service_name={serviceName}&fields=serviceconfigversion,user,group_id,group_name,createtime,service_name,service_config_version_note&minimal_response=true',
     mock: '/data/configurations/service_versions.json'
   },
   'service.serviceConfigVersions.get.current': {

http://git-wip-us.apache.org/repos/asf/ambari/blob/46a91dbb/ambari-web/app/views/common/configs/config_history_flow.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/configs/config_history_flow.js b/ambari-web/app/views/common/configs/config_history_flow.js
index b6881a5..a4c21af 100644
--- a/ambari-web/app/views/common/configs/config_history_flow.js
+++ b/ambari-web/app/views/common/configs/config_history_flow.js
@@ -42,6 +42,14 @@ App.ConfigHistoryFlowView = Em.View.extend({
     return this.get('controller.selectedService.serviceName');
   }.property('controller.selectedService.serviceName'),
 
+  selectedConfigGroupName: function () {
+    return this.get('controller.selectedConfigGroup.displayName');
+  }.property('controller.selectedConfigGroup.displayName'),
+
+  isDefaultConfigGroupSelected: function () {
+    return this.get('controller.selectedConfigGroup.isDefault');
+  }.property('controller.selectedConfigGroup.isDefault'),
+
   displayedServiceVersion: function () {
     return this.get('serviceVersions').findProperty('isDisplayed');
   }.property('serviceVersions.@each.isDisplayed'),
@@ -64,11 +72,30 @@ App.ConfigHistoryFlowView = Em.View.extend({
   }.property('displayedServiceVersion'),
 
   serviceVersions: function () {
+    var serviceVersions;
     var allServiceVersions = App.ServiceConfigVersion.find().filterProperty('serviceName', this.get('serviceName'));
-    return allServiceVersions.sort(function (a, b) {
+    if (this.get('isDefaultConfigGroupSelected')) {
+      // filtered all versions which belong to default group
+      serviceVersions = allServiceVersions.filterProperty('groupName', null);
+      serviceVersions.forEach( function (version) {
+        version.set('isDisabled', false);
+      });
+    }else {
+      // filter out default group(should be grayedOut) and current selectedGroup versions
+      var defaultServiceVersions = allServiceVersions.filterProperty('groupName', null);
+      defaultServiceVersions.forEach( function (version) {
+        version.set('isDisabled', true);
+      });
+      var selectedServiceVersions = allServiceVersions.filterProperty('groupName', this.get('selectedConfigGroupName'));
+      selectedServiceVersions.forEach( function (version) {
+        version.set('isDisabled', false);
+      });
+      serviceVersions = selectedServiceVersions.concat(defaultServiceVersions) ;
+    }
+    return serviceVersions.sort(function (a, b) {
       return Em.get(a, 'createTime') - Em.get(b, 'createTime');
     });
-  }.property('serviceName'),
+  }.property('serviceName', 'selectedConfigGroupName', 'isDefaultConfigGroupSelected'),
   /**
    * service versions which in viewport and visible to user
    */
@@ -204,6 +231,8 @@ App.ConfigHistoryFlowView = Em.View.extend({
    * add a second version-info-bar for the chosen version
    */
   compare: function (event) {
+    var isDisabled = event.context ? event.context.get('isDisabled') : false;
+    if (isDisabled) return;
     this.set('controller.compareServiceVersion', event.context);
     this.get('controller').onConfigGroupChange();
   },
@@ -212,6 +241,8 @@ App.ConfigHistoryFlowView = Em.View.extend({
    */
   revert: function (event) {
     var self = this;
+    var isDisabled = event.context ? event.context.get('isDisabled') : false;
+    if (isDisabled) return;
     var serviceConfigVersion = event.context || Em.Object.create({
       version: this.get('displayedServiceVersion.version'),
       serviceName: this.get('displayedServiceVersion.serviceName')
@@ -220,7 +251,7 @@ App.ConfigHistoryFlowView = Em.View.extend({
     App.showConfirmationPopup(function () {
         self.sendRevertCall(serviceConfigVersion);
       },
-      Em.I18n.t('services.service.config.configHistory.makeCurrent.message').format(versionText, this.get('displayedServiceVersion.serviceName'), this.get('displayedServiceVersion.configGroup'))
+      Em.I18n.t('services.service.config.configHistory.makeCurrent.message').format(versionText, this.get('displayedServiceVersion.serviceName'), this.get('displayedServiceVersion.configGroupName'))
     );
   },
 


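To summarize the group-aware filtering introduced in config_history_flow.js above, the sketch below restates the rule in isolation. It is an illustrative reduction, not code from the commit: the helper name, version objects, and group names are hypothetical, the actual view mutates the version records in place rather than copying them, and the Ember observer wiring is left out.

  // Illustrative restatement of the serviceVersions filtering above (not part of the commit).
  // Versions with groupName === null belong to the service's default config group.
  function visibleVersions(allVersions, selectedGroupName, isDefaultGroupSelected) {
    var result;
    if (isDefaultGroupSelected) {
      // Default group selected: show only default-group versions, all enabled.
      result = allVersions
        .filter(function (v) { return v.groupName === null; })
        .map(function (v) { return Object.assign({}, v, { isDisabled: false }); });
    } else {
      // Non-default group selected: its versions stay enabled, while default-group
      // versions are still listed but disabled (grayed out; compare/revert blocked).
      var selected = allVersions
        .filter(function (v) { return v.groupName === selectedGroupName; })
        .map(function (v) { return Object.assign({}, v, { isDisabled: false }); });
      var defaults = allVersions
        .filter(function (v) { return v.groupName === null; })
        .map(function (v) { return Object.assign({}, v, { isDisabled: true }); });
      result = selected.concat(defaults);
    }
    // Oldest to newest, matching the sort by createTime in the view.
    return result.sort(function (a, b) { return a.createTime - b.createTime; });
  }
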
[30/35] git commit: AMBARI-7019. sqoop-env and other configs exist for unavailable services (after upgrade from ambari 1.6.1 -> 1.7.0).(vbrodetskyi)

Posted by jo...@apache.org.
AMBARI-7019. sqoop-env and other configs exist for unavailable services (after upgrade from ambari 1.6.1 -> 1.7.0).(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/926acf4f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/926acf4f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/926acf4f

Branch: refs/heads/branch-alerts-dev
Commit: 926acf4fc681e01f1568f6c3bdbf4ab254040b07
Parents: 2dc55f0
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Wed Aug 27 13:29:39 2014 +0300
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Wed Aug 27 13:29:39 2014 +0300

----------------------------------------------------------------------
 .../controller/AmbariManagementControllerImpl.java      |  2 +-
 .../controller/internal/ClusterResourceProvider.java    |  6 +++---
 .../org/apache/ambari/server/state/ConfigHelper.java    | 12 ++++++------
 .../apache/ambari/server/upgrade/UpgradeCatalog170.java |  6 ++++--
 .../controller/AmbariManagementControllerTest.java      |  4 +++-
 .../ambari/server/upgrade/UpgradeCatalog170Test.java    |  9 +++++----
 6 files changed, 22 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/926acf4f/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index f7f2f2d..c6b2f9f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -657,7 +657,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     if(request.getType().equals(Configuration.GLOBAL_CONFIG_TAG)) {
       Map<String, Map<String, String>> configTypes = new HashMap<String, Map<String, String>>();
       configTypes.put(Configuration.GLOBAL_CONFIG_TAG, request.getProperties());
-      configHelper.moveDeprecatedGlobals(cluster.getCurrentStackVersion(), configTypes);
+      configHelper.moveDeprecatedGlobals(cluster.getCurrentStackVersion(), configTypes, cluster.getClusterName());
 
       for(Map.Entry<String, Map<String, String>> configType : configTypes.entrySet()) {
         String configTypeName = configType.getKey();

http://git-wip-us.apache.org/repos/asf/ambari/blob/926acf4f/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
index 113e7b8..eeee694 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
@@ -870,9 +870,9 @@ public class ClusterResourceProvider extends BaseBlueprintProcessor {
    * @param blueprintConfigurations  map of blueprint configurations keyed by type
    */
   private void handleGlobalsBackwardsCompability(Stack stack,
-      Map<String, Map<String, String>> blueprintConfigurations) {
+      Map<String, Map<String, String>> blueprintConfigurations, String clusterName) {
     StackId stackId = new StackId(stack.getName(), stack.getVersion());
-    configHelper.moveDeprecatedGlobals(stackId, blueprintConfigurations);
+    configHelper.moveDeprecatedGlobals(stackId, blueprintConfigurations, clusterName);
   }
 
   /**
@@ -1051,7 +1051,7 @@ public class ClusterResourceProvider extends BaseBlueprintProcessor {
       HostGroupEntity entity = group.getEntity();
       Map<String, Map<String, Config>> groupConfigs = new HashMap<String, Map<String, Config>>();
       
-      handleGlobalsBackwardsCompability(stack, group.getConfigurationProperties());
+      handleGlobalsBackwardsCompability(stack, group.getConfigurationProperties(), clusterName);
       for (Map.Entry<String, Map<String, String>> entry: group.getConfigurationProperties().entrySet()) {
         String type = entry.getKey();
         String service = stack.getServiceForConfigType(type);

http://git-wip-us.apache.org/repos/asf/ambari/blob/926acf4f/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
index a0d9e6e..43838d7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
@@ -414,14 +414,14 @@ public class ConfigHelper {
    * @param stackId
    * @param propertyName
    */
-  public Set<String> findConfigTypesByPropertyName(StackId stackId, String propertyName) throws AmbariException {
+  public Set<String> findConfigTypesByPropertyName(StackId stackId, String propertyName, String clusterName) throws AmbariException {
     StackInfo stack = ambariMetaInfo.getStackInfo(stackId.getStackName(),
         stackId.getStackVersion());
     
     Set<String> result = new HashSet<String>();
-    
-    for(ServiceInfo serviceInfo:stack.getServices()) {
-      Set<PropertyInfo> stackProperties = ambariMetaInfo.getProperties(stack.getName(), stack.getVersion(), serviceInfo.getName());
+
+    for(Service service : clusters.getCluster(clusterName).getServices().values()) {
+      Set<PropertyInfo> stackProperties = ambariMetaInfo.getProperties(stack.getName(), stack.getVersion(), service.getName());
       
       for (PropertyInfo stackProperty : stackProperties) {
         if(stackProperty.getName().equals(propertyName)) {
@@ -488,7 +488,7 @@ public class ConfigHelper {
    *
    * @param configurations  map of configurations keyed by type
    */
-  public void moveDeprecatedGlobals(StackId stackId, Map<String, Map<String, String>> configurations) {
+  public void moveDeprecatedGlobals(StackId stackId, Map<String, Map<String, String>> configurations, String clusterName) {
     Map<String, String> globalConfigurations = new HashMap<String, String>();
     
     if(configurations.get(Configuration.GLOBAL_CONFIG_TAG) == null ||
@@ -508,7 +508,7 @@ public class ConfigHelper {
       
       Set<String> newConfigTypes = null;
       try{
-        newConfigTypes = this.findConfigTypesByPropertyName(stackId, propertyName);
+        newConfigTypes = this.findConfigTypesByPropertyName(stackId, propertyName, clusterName);
       } catch(AmbariException e) {
         LOG.error("Exception while getting configurations from the stacks", e);
         return;

http://git-wip-us.apache.org/repos/asf/ambari/blob/926acf4f/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
index 864c084..0ac9da7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
@@ -812,7 +812,8 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
 
     if (clusterMap != null && !clusterMap.isEmpty()) {
       for (final Cluster cluster : clusterMap.values()) {
-        Set<String> configTypes = configHelper.findConfigTypesByPropertyName(cluster.getCurrentStackVersion(), CONTENT_FIELD_NAME);
+        Set<String> configTypes = configHelper.findConfigTypesByPropertyName(cluster.getCurrentStackVersion(),
+                CONTENT_FIELD_NAME, cluster.getClusterName());
 
         for(String configType:configTypes) {
           if(!configType.endsWith(ENV_CONFIGS_POSTFIX)) {
@@ -853,7 +854,8 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
           String propertyName = property.getKey();
           String propertyValue = property.getValue();
 
-          Set<String> newConfigTypes = configHelper.findConfigTypesByPropertyName(cluster.getCurrentStackVersion(), propertyName);
+          Set<String> newConfigTypes = configHelper.findConfigTypesByPropertyName(cluster.getCurrentStackVersion(),
+                  propertyName, cluster.getClusterName());
           // if it's custom user service global.xml can be still there.
           newConfigTypes.remove(Configuration.GLOBAL_CONFIG_TAG);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/926acf4f/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index cf2bcbb..1bfc25a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -8936,6 +8936,7 @@ public class AmbariManagementControllerTest {
 
       Set<ServiceRequest> serviceRequests = new HashSet<ServiceRequest>();
       serviceRequests.add(new ServiceRequest("c1", "HDFS", null));
+      serviceRequests.add(new ServiceRequest("c1", "HIVE", null));
 
       ServiceResourceProviderTest.createServices(amc, serviceRequests);
 
@@ -9144,9 +9145,10 @@ public class AmbariManagementControllerTest {
       ServiceResourceProviderTest.updateServices(amc, serviceRequests, mapRequestProps, true, false);
       serviceRequests.clear();
       serviceRequests.add(new ServiceRequest("c1", null, null));
-      org.junit.Assert.assertEquals(1, ServiceResourceProviderTest.getServices(amc, serviceRequests).size());
+      org.junit.Assert.assertEquals(2, ServiceResourceProviderTest.getServices(amc, serviceRequests).size());
       serviceRequests.clear();
       serviceRequests.add(new ServiceRequest("c1", "HDFS", null));
+      serviceRequests.add(new ServiceRequest("c1", "HIVE", null));
       ServiceResourceProviderTest.deleteServices(amc, serviceRequests);
       serviceRequests.clear();
       serviceRequests.add(new ServiceRequest("c1", null, null));

http://git-wip-us.apache.org/repos/asf/ambari/blob/926acf4f/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
index 5210add..e2ce75e 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
@@ -224,7 +224,7 @@ public class UpgradeCatalog170Test {
     Injector injector = createNiceMock(Injector.class);
     ConfigHelper configHelper = createNiceMock(ConfigHelper.class);
     AmbariManagementController amc = createNiceMock(AmbariManagementController.class);
-    Cluster cluster = createStrictMock(Cluster.class);
+    Cluster cluster = createNiceMock(Cluster.class);
     Clusters clusters = createStrictMock(Clusters.class);
     Config config = createStrictMock(Config.class);
     Config pigConfig = createStrictMock(Config.class);
@@ -323,9 +323,10 @@ public class UpgradeCatalog170Test {
     expect(cluster.getDesiredConfigByType("global")).andReturn(config).anyTimes();
     expect(config.getProperties()).andReturn(globalConfigs).anyTimes();
     expect(cluster.getCurrentStackVersion()).andReturn(new StackId("HDP", "2.1")).anyTimes();
-    expect(configHelper.findConfigTypesByPropertyName(new StackId("HDP", "2.1"), "prop1")).andReturn(envDicts).once();
-    expect(configHelper.findConfigTypesByPropertyName(new StackId("HDP", "2.1"), "smokeuser_keytab")).andReturn(new HashSet<String>()).once();
-    expect(configHelper.findConfigTypesByPropertyName(new StackId("HDP", "2.1"), "content")).andReturn(envDicts).once();
+    expect(cluster.getClusterName()).andReturn("c1").anyTimes();
+    expect(configHelper.findConfigTypesByPropertyName(new StackId("HDP", "2.1"), "prop1", "c1")).andReturn(envDicts).once();
+    expect(configHelper.findConfigTypesByPropertyName(new StackId("HDP", "2.1"), "smokeuser_keytab", "c1")).andReturn(new HashSet<String>()).once();
+    expect(configHelper.findConfigTypesByPropertyName(new StackId("HDP", "2.1"), "content", "c1")).andReturn(envDicts).once();
     expect(configHelper.getPropertyValueFromStackDefenitions(cluster, "hadoop-env", "content")).andReturn("env file contents").once();
 
     expect(injector.getInstance(UserDAO.class)).andReturn(userDAO).anyTimes();


[27/35] git commit: AMBARI-7024 - Alerts: DDL Update For Default Service Alert Groups (jonathanhurley)

Posted by jo...@apache.org.
AMBARI-7024 - Alerts: DDL Update For Default Service Alert Groups (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0e9c744f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0e9c744f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0e9c744f

Branch: refs/heads/branch-alerts-dev
Commit: 0e9c744f21a7b8aaada917eafbf3fbbed1a87edc
Parents: 1ab3bb5
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Tue Aug 26 17:53:28 2014 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Tue Aug 26 19:51:38 2014 -0400

----------------------------------------------------------------------
 .../server/orm/entities/AlertGroupEntity.java   | 74 ++++++++++++++------
 .../server/upgrade/UpgradeCatalog170.java       |  1 +
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |  1 +
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |  1 +
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |  1 +
 .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql     |  1 +
 .../server/upgrade/UpgradeCatalog170Test.java   |  4 +-
 7 files changed, 60 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0e9c744f/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertGroupEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertGroupEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertGroupEntity.java
index 976855e..1ca592c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertGroupEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertGroupEntity.java
@@ -65,6 +65,9 @@ public class AlertGroupEntity {
   @Column(name = "is_default", nullable = false)
   private Integer isDefault = Integer.valueOf(0);
 
+  @Column(name = "service_name", nullable = true, length = 255)
+  private String serviceName;
+
   /**
    * Bi-directional many-to-many association to {@link AlertDefinitionEntity}
    */
@@ -81,7 +84,7 @@ public class AlertGroupEntity {
 
   /**
    * Gets the unique ID of this grouping of alerts.
-   * 
+   *
    * @return the ID (never {@code null}).
    */
   public Long getGroupId() {
@@ -90,7 +93,7 @@ public class AlertGroupEntity {
 
   /**
    * Sets the unique ID of this grouping of alerts.
-   * 
+   *
    * @param groupId
    *          the ID (not {@code null}).
    */
@@ -100,7 +103,7 @@ public class AlertGroupEntity {
 
   /**
    * Gets the ID of the cluster that this alert group is a part of.
-   * 
+   *
    * @return the ID (never {@code null}).
    */
   public Long getClusterId() {
@@ -109,7 +112,7 @@ public class AlertGroupEntity {
 
   /**
    * Sets the ID of the cluster that this alert group is a part of.
-   * 
+   *
    * @param clusterId
    *          the ID of the cluster (not {@code null}).
    */
@@ -120,7 +123,7 @@ public class AlertGroupEntity {
   /**
    * Gets the name of the grouping of alerts. Group names are unique in a given
    * cluster.
-   * 
+   *
    * @return the group name (never {@code null}).
    */
   public String getGroupName() {
@@ -130,7 +133,7 @@ public class AlertGroupEntity {
   /**
    * Sets the name of this grouping of alerts. Group names are unique in a given
    * cluster.
-   * 
+   *
    * @param groupName
    *          the name of the group (not {@code null}).
    */
@@ -143,7 +146,7 @@ public class AlertGroupEntity {
    * groups cannot have their alert definition groupings changed. New alert
    * definitions are automatically added to the default group that belongs to
    * the service of that definition.
-   * 
+   *
    * @return {@code true} if this is a default group, {@code false} otherwise.
    */
   public boolean isDefault() {
@@ -152,7 +155,7 @@ public class AlertGroupEntity {
 
   /**
    * Sets whether this is a default group and is immutable.
-   * 
+   *
    * @param isDefault
    *          {@code true} to make this group immutable.
    */
@@ -161,8 +164,31 @@ public class AlertGroupEntity {
   }
 
   /**
-   * Gets all of the alert definitions that are a part of this grouping.
+   * Gets the name of the service. This is only applicable when
+   * {@link #isDefault()} is {@code true}.
+   *
+   * @return the service that this is the default group for, or {@code null} if
+   *         this is not a default group.
+   */
+  public String getServiceName() {
+    return serviceName;
+  }
+
+  /**
+   * Set the service name. This is only applicable when {@link #isDefault()} is
+   * {@code true}.
    * 
+   * @param serviceName
+   *          the service that this is the default group for, or {@code null} if
+   *          this is not a default group.
+   */
+  public void setServiceName(String serviceName) {
+    this.serviceName = serviceName;
+  }
+
+  /**
+   * Gets all of the alert definitions that are a part of this grouping.
+   *
    * @return the alert definitions or {@code null} if none.
    */
   public Set<AlertDefinitionEntity> getAlertDefinitions() {
@@ -171,7 +197,7 @@ public class AlertGroupEntity {
 
   /**
    * Set all of the alert definitions that are part of this alert group.
-   * 
+   *
    * @param alertDefinitions
    *          the definitions, or {@code null} for none.
    */
@@ -182,25 +208,27 @@ public class AlertGroupEntity {
   /**
    * Gets an immutable set of the targets that will receive notifications for
    * alert definitions in this group.
-   * 
+   *
    * @return the targets, or {@code null} if there are none.
    */
   public Set<AlertTargetEntity> getAlertTargets() {
-    if( null == alertTargets )
+    if( null == alertTargets ) {
       return Collections.emptySet();
-      
+    }
+
     return Collections.unmodifiableSet(alertTargets);
   }
 
   /**
    * Adds the specified target to the targets that this group will dispatch to.
-   * 
+   *
    * @param alertTarget
    *          the target to add (not {@code null}).
    */
   public void addAlertTarget(AlertTargetEntity alertTarget) {
-    if (null == alertTargets)
+    if (null == alertTargets) {
       alertTargets = new HashSet<AlertTargetEntity>();
+    }
 
     alertTargets.add(alertTarget);
     alertTarget.addAlertGroup(this);
@@ -209,13 +237,14 @@ public class AlertGroupEntity {
   /**
    * Removes the specified target from the targets that this group will dispatch
    * to.
-   * 
+   *
    * @param alertTarget
    *          the target to remove (not {@code null}).
    */
   public void removeAlertTarget(AlertTargetEntity alertTarget) {
-    if (null != alertTargets)
+    if (null != alertTargets) {
       alertTargets.remove(alertTarget);
+    }
 
     alertTarget.removeAlertGroup(this);
   }
@@ -223,7 +252,7 @@ public class AlertGroupEntity {
   /**
    * Sets all of the targets that will receive notifications for alert
    * definitions in this group.
-   * 
+   *
    * @param alertTargets
    *          the targets, or {@code null} if there are none.
    */
@@ -248,16 +277,19 @@ public class AlertGroupEntity {
    */
   @Override
   public boolean equals(Object object) {
-    if (this == object)
+    if (this == object) {
       return true;
+    }
 
-    if (object == null || getClass() != object.getClass())
+    if (object == null || getClass() != object.getClass()) {
       return false;
+    }
 
     AlertGroupEntity that = (AlertGroupEntity) object;
 
-    if (groupId != null ? !groupId.equals(that.groupId) : that.groupId != null)
+    if (groupId != null ? !groupId.equals(that.groupId) : that.groupId != null) {
       return false;
+    }
 
     return true;
   }
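
The new nullable service_name column and the accessors above tie a default group to
the service it covers; user-defined groups simply leave it unset. A hedged usage
sketch (not taken from the patch; setGroupName() is assumed from its javadoc above):

  AlertGroupEntity hdfsDefault = new AlertGroupEntity();
  hdfsDefault.setGroupName("HDFS");     // assumed setter, implied by the javadoc above
  hdfsDefault.setServiceName("HDFS");   // only meaningful when isDefault() is true

  AlertGroupEntity custom = new AlertGroupEntity();
  custom.setServiceName(null);          // non-default groups are not bound to a service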

http://git-wip-us.apache.org/repos/asf/ambari/blob/0e9c744f/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
index dc3d5b8..864c084 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
@@ -671,6 +671,7 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
     columns.add(new DBColumnInfo("cluster_id", Long.class, null, null, false));
     columns.add(new DBColumnInfo("group_name", String.class, 255, null, false));
     columns.add(new DBColumnInfo("is_default", Short.class, 1, 1, false));
+    columns.add(new DBColumnInfo("service_name", String.class, 255, null, true));
     dbAccessor.createTable(ALERT_TABLE_GROUP, columns, "group_id");
 
     dbAccessor.executeQuery(

http://git-wip-us.apache.org/repos/asf/ambari/blob/0e9c744f/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 2d83aeb..59d972e 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -200,6 +200,7 @@ CREATE TABLE alert_group (
   cluster_id BIGINT NOT NULL,
   group_name VARCHAR(255) NOT NULL,
   is_default SMALLINT NOT NULL DEFAULT 0,
+  service_name VARCHAR(255),
   PRIMARY KEY (group_id),
   CONSTRAINT uni_alert_group_name UNIQUE(cluster_id,group_name)
 );

http://git-wip-us.apache.org/repos/asf/ambari/blob/0e9c744f/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 241bb2b..52ca857 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -191,6 +191,7 @@ CREATE TABLE alert_group (
   cluster_id NUMBER(19) NOT NULL,
   group_name VARCHAR2(255) NOT NULL,
   is_default NUMBER(1) DEFAULT 0 NOT NULL,
+  service_name VARCHAR2(255),
   PRIMARY KEY (group_id),
   CONSTRAINT uni_alert_group_name UNIQUE(cluster_id,group_name)
 );

http://git-wip-us.apache.org/repos/asf/ambari/blob/0e9c744f/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index ddea71c..39d3ecf 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -223,6 +223,7 @@ CREATE TABLE alert_group (
   cluster_id BIGINT NOT NULL,
   group_name VARCHAR(255) NOT NULL,
   is_default SMALLINT NOT NULL DEFAULT 0,
+  service_name VARCHAR(255),
   PRIMARY KEY (group_id),
   CONSTRAINT uni_alert_group_name UNIQUE(cluster_id,group_name)
 );

http://git-wip-us.apache.org/repos/asf/ambari/blob/0e9c744f/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
index 17b95c2..196d298 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
@@ -287,6 +287,7 @@ CREATE TABLE ambari.alert_group (
   cluster_id BIGINT NOT NULL,
   group_name VARCHAR(255) NOT NULL,
   is_default SMALLINT NOT NULL DEFAULT 0,
+  service_name VARCHAR(255),
   PRIMARY KEY (group_id),
   CONSTRAINT uni_alert_group_name UNIQUE(cluster_id,group_name)
 );

http://git-wip-us.apache.org/repos/asf/ambari/blob/0e9c744f/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
index 1cfba46..5210add 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
@@ -208,7 +208,7 @@ public class UpgradeCatalog170Test {
     assertEquals(12, alertDefinitionColumnCapture.getValue().size());
     assertEquals(11, alertHistoryColumnCapture.getValue().size());
     assertEquals(6, alertCurrentColumnCapture.getValue().size());
-    assertEquals(4, alertGroupColumnCapture.getValue().size());
+    assertEquals(5, alertGroupColumnCapture.getValue().size());
     assertEquals(5, alertTargetCapture.getValue().size());
     assertEquals(2, alertGroupTargetCapture.getValue().size());
     assertEquals(2, alertGroupingCapture.getValue().size());
@@ -428,7 +428,7 @@ public class UpgradeCatalog170Test {
     dbAccessor.addColumn(eq("clusterconfig"),
         capture(clusterConfigAttributesColumnCapture));
   }
- 
+
   private void setStageExpectations(DBAccessor dbAccessor,
                                     Capture<DBAccessor.DBColumnInfo> stageCommandParamsColumnCapture,
                                     Capture<DBAccessor.DBColumnInfo> stageHostParamsColumnCapture)


[10/35] git commit: AMBARI-7007 - href is wrong for sub resource query of request resource

Posted by jo...@apache.org.
AMBARI-7007 - href is wrong for sub resource query of request resource


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8bed2592
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8bed2592
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8bed2592

Branch: refs/heads/branch-alerts-dev
Commit: 8bed25926031736ae6eed2f774e7996f230b5e13
Parents: 160abc5
Author: tbeerbower <tb...@hortonworks.com>
Authored: Mon Aug 25 20:04:04 2014 -0400
Committer: tbeerbower <tb...@hortonworks.com>
Committed: Tue Aug 26 08:28:45 2014 -0400

----------------------------------------------------------------------
 .../server/api/resources/RequestResourceDefinition.java | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8bed2592/ambari-server/src/main/java/org/apache/ambari/server/api/resources/RequestResourceDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/RequestResourceDefinition.java b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/RequestResourceDefinition.java
index a3920d1..291b01a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/RequestResourceDefinition.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/RequestResourceDefinition.java
@@ -83,15 +83,19 @@ public class RequestResourceDefinition extends BaseResourceDefinition {
         sb.append(href);
         sb.append('/').append(requestId);
       } else {
-        // split the href up into its parts, intercepting "clusers" in order
+        // split the href up into its parts, intercepting "clusters" in order
         // to rewrite the href to be scoped for requests
         String[] tokens = href.split("/");
 
-        for (int i = 0; i < tokens.length; ++i) {
+        Integer tokenCount = tokens.length;
+        for (int i = 0; i < tokenCount; ++i) {
           String fragment = tokens[i];
-          sb.append(fragment).append('/');
+          sb.append(fragment);
+          if (i < tokenCount - 1 ) {
+            sb.append('/');
+          }
 
-          if ("clusters".equals(fragment) && i + 1 < tokens.length) {
+          if ("clusters".equals(fragment) && i + 1 < tokenCount) {
             String clusterName = tokens[i + 1];
             sb.append(clusterName).append("/");
             sb.append("requests/").append(requestId);


[29/35] git commit: AMBARI-7028. Config History: need to set notes for the new SCV when reverting back via 'Make Current' action.(xiwang)

Posted by jo...@apache.org.
AMBARI-7028. Config History: need to set notes for the new SCV when reverting back via 'Make Current' action.(xiwang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2dc55f08
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2dc55f08
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2dc55f08

Branch: refs/heads/branch-alerts-dev
Commit: 2dc55f08fe7b1660122145853ede4d7a04c014a8
Parents: a8e6736
Author: Xi Wang <xi...@apache.org>
Authored: Tue Aug 26 17:35:45 2014 -0700
Committer: Xi Wang <xi...@apache.org>
Committed: Tue Aug 26 17:48:46 2014 -0700

----------------------------------------------------------------------
 ambari-web/app/messages.js                      |  3 +-
 .../views/common/configs/config_history_flow.js | 32 ++++++++++++++++----
 2 files changed, 28 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2dc55f08/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 31d3e69..9579b04 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -1410,7 +1410,7 @@ Em.I18n.translations = {
     'on the Ambari Server host to make the JDBC driver available and to enable testing the database connection.',
 
   'services.service.config.configHistory.configGroup': 'Group',
-  'services.service.config.configHistory.makeCurrent.message': 'Are you sure you want to make {0} the current version for {1}:{2} ?',
+  'services.service.config.configHistory.makeCurrent.message': 'Created from service config version {0}',
 
   'services.add.header':'Add Service Wizard',
   'services.reassign.header':'Move Master Wizard',
@@ -2000,6 +2000,7 @@ Em.I18n.translations = {
   'dashboard.configHistory.info-bar.changesToHandle': 'Changes to handle',
   'dashboard.configHistory.info-bar.showMore': 'Show more',
   'dashboard.configHistory.info-bar.save.popup.title': 'Save Configuration',
+  'dashboard.configHistory.info-bar.makeCurrent.popup.title': 'Make Current Confirmation',
   'dashboard.configHistory.info-bar.save.popup.placeholder': 'What did you change?',
   'dashboard.configHistory.info-bar.revert.button': 'Make current',
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/2dc55f08/ambari-web/app/views/common/configs/config_history_flow.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/configs/config_history_flow.js b/ambari-web/app/views/common/configs/config_history_flow.js
index a4c21af..0c0e686 100644
--- a/ambari-web/app/views/common/configs/config_history_flow.js
+++ b/ambari-web/app/views/common/configs/config_history_flow.js
@@ -245,14 +245,33 @@ App.ConfigHistoryFlowView = Em.View.extend({
     if (isDisabled) return;
     var serviceConfigVersion = event.context || Em.Object.create({
       version: this.get('displayedServiceVersion.version'),
-      serviceName: this.get('displayedServiceVersion.serviceName')
+      serviceName: this.get('displayedServiceVersion.serviceName'),
+      notes:''
     });
     var versionText = event.context ? event.context.get('versionText') : this.get('displayedServiceVersion.versionText');
-    App.showConfirmationPopup(function () {
+    var configGroupName = this.get('displayedServiceVersion.configGroupName');
+    return App.ModalPopup.show({
+      header: Em.I18n.t('dashboard.configHistory.info-bar.makeCurrent.popup.title'),
+      serviceConfigNote: Em.I18n.t('services.service.config.configHistory.makeCurrent.message').format(versionText),
+      bodyClass: Em.View.extend({
+        templateName: require('templates/common/configs/save_configuration'),
+        notesArea: Em.TextArea.extend({
+          classNames: ['full-width'],
+          value: Em.I18n.t('services.service.config.configHistory.makeCurrent.message').format(versionText),
+          onChangeValue: function() {
+            this.get('parentView.parentView').set('serviceConfigNote', this.get('value'));
+          }.observes('value')
+        })
+      }),
+      primary: Em.I18n.t('dashboard.configHistory.info-bar.revert.button'),
+      secondary: Em.I18n.t('common.discard'),
+      third: Em.I18n.t('common.cancel'),
+      onPrimary: function () {
+        serviceConfigVersion.set('serviceConfigNote', this.get('serviceConfigNote'));
         self.sendRevertCall(serviceConfigVersion);
-      },
-      Em.I18n.t('services.service.config.configHistory.makeCurrent.message').format(versionText, this.get('displayedServiceVersion.serviceName'), this.get('displayedServiceVersion.configGroupName'))
-    );
+        this.hide();
+      }
+    });
   },
 
   /**
@@ -268,7 +287,8 @@ App.ConfigHistoryFlowView = Em.View.extend({
           "Clusters": {
             "desired_serviceconfigversions": {
               "serviceconfigversion": serviceConfigVersion.get('version'),
-              "service_name": serviceConfigVersion.get('serviceName')
+              "service_name": serviceConfigVersion.get('serviceName'),
+              "service_config_version_note": serviceConfigVersion.get('serviceConfigNote')
             }
           }
         }


[31/35] git commit: AMBARI-7031 FE: Next button not enabled when only /validation ERRORs are there

Posted by jo...@apache.org.
AMBARI-7031 FE: Next button not enabled when only /validation ERRORs are there


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4430ebf0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4430ebf0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4430ebf0

Branch: refs/heads/branch-alerts-dev
Commit: 4430ebf070ddef99bcb5ba610e13d5f39f5cd910
Parents: 926acf4
Author: aBabiichuk <ab...@cybervisiontech.com>
Authored: Wed Aug 27 14:32:45 2014 +0300
Committer: aBabiichuk <ab...@cybervisiontech.com>
Committed: Wed Aug 27 14:34:22 2014 +0300

----------------------------------------------------------------------
 .../app/controllers/main/service/info/configs.js   |  4 +++-
 ambari-web/app/mixins/common/serverValidator.js    | 17 +++++------------
 2 files changed, 8 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4430ebf0/ambari-web/app/controllers/main/service/info/configs.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/info/configs.js b/ambari-web/app/controllers/main/service/info/configs.js
index dce4c35..8eee9cb 100644
--- a/ambari-web/app/controllers/main/service/info/configs.js
+++ b/ambari-web/app/controllers/main/service/info/configs.js
@@ -1095,11 +1095,13 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
           App.showConfirmationPopup(function () {
             self.saveConfigs();
           }, Em.I18n.t('services.service.config.confirmDirectoryChange').format(displayName), function () {
-            self.set('isApplyingChanges', false)
+            self.set('isApplyingChanges', false);
           });
         } else {
           self.saveConfigs();
         }
+      }).fail(function() {
+        self.set('isApplyingChanges', false);
       });
     } else {
       status = 'started';

http://git-wip-us.apache.org/repos/asf/ambari/blob/4430ebf0/ambari-web/app/mixins/common/serverValidator.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/common/serverValidator.js b/ambari-web/app/mixins/common/serverValidator.js
index 462ff62..3a1dbed 100644
--- a/ambari-web/app/mixins/common/serverValidator.js
+++ b/ambari-web/app/mixins/common/serverValidator.js
@@ -236,8 +236,8 @@ App.ServerValidatorMixin = Em.Mixin.create({
               if ((property.get('filename') == item['config-type'] + '.xml') && (property.get('name') == item['config-name'])) {
                 if (item.level == "ERROR") {
                   self.set('configValidationError', true);
-                  property.set('errorMessage', item.message);
-                  property.set('error', true);
+                  property.set('warnMessage', item.message);
+                  property.set('warn', true);
                 } else if (item.level == "WARN") {
                   self.set('configValidationWarning', true);
                   property.set('warnMessage', item.message);
@@ -264,21 +264,14 @@ App.ServerValidatorMixin = Em.Mixin.create({
    * @returns {*}
    */
   warnUser: function(deferred) {
-    var self = this;
     if (this.get('configValidationFailed')) {
-      this.set('isSubmitDisabled', false);
-      this.set("isApplyingChanges", false);
       deferred.reject();
       return App.showAlertPopup(Em.I18n.t('installer.step7.popup.validation.failed.header'), Em.I18n.t('installer.step7.popup.validation.request.failed.body'));
     } else if (this.get('configValidationWarning') || this.get('configValidationError')) {
       // Motivation: for server-side validation warnings and EVEN errors allow user to continue wizard
-      this.set('isSubmitDisabled', true);
-      this.set("isApplyingChanges", false);
-      return App.showConfirmationPopup(function () {
-        self.set('isSubmitDisabled', false);
-        self.set("isApplyingChanges", true);
-        deferred.resolve();
-      }, Em.I18n.t('installer.step7.popup.validation.warning.body'));
+      return App.showConfirmationPopup(function () { deferred.resolve(); },
+          Em.I18n.t('installer.step7.popup.validation.warning.body'),
+          function () { deferred.reject(); });
     } else {
       deferred.resolve();
     }


[16/35] git commit: AMBARI-6061. OutOfMemoryError during host checks on 2k nodes cluster (dlysnichenko)

Posted by jo...@apache.org.
AMBARI-6061. OutOfMemoryError during host checks on 2k nodes cluster (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9faeaf5a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9faeaf5a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9faeaf5a

Branch: refs/heads/branch-alerts-dev
Commit: 9faeaf5a525bfee0932c1a654b28651c0b1705dd
Parents: db83ea2
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Tue Aug 26 15:02:41 2014 +0300
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Tue Aug 26 17:24:48 2014 +0300

----------------------------------------------------------------------
 .../server/actionmanager/ActionScheduler.java   |  33 +++++
 .../ambari/server/actionmanager/Stage.java      |  32 ++++-
 .../server/actionmanager/StageFactory.java      |   4 +-
 .../ambari/server/agent/ExecutionCommand.java   |   2 +-
 .../controller/AmbariActionExecutionHelper.java |   9 +-
 .../AmbariCustomCommandExecutionHelper.java     |  34 ++---
 .../AmbariManagementControllerImpl.java         | 128 ++++++++++---------
 .../ambari/server/orm/entities/StageEntity.java |  28 ++++
 .../ambari/server/stageplanner/RoleGraph.java   |   3 +-
 .../server/upgrade/UpgradeCatalog170.java       |   7 +-
 .../apache/ambari/server/utils/StageUtils.java  |  57 +++++----
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   2 +-
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |   2 +-
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |   2 +-
 .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql     |   2 +-
 .../ExecutionCommandWrapperTest.java            |   2 +-
 .../actionmanager/TestActionDBAccessorImpl.java |  20 +--
 .../server/actionmanager/TestActionManager.java |  31 +++--
 .../actionmanager/TestActionScheduler.java      |  33 +++--
 .../ambari/server/actionmanager/TestStage.java  |   4 +-
 .../server/agent/TestHeartbeatHandler.java      |  15 ++-
 .../AmbariManagementControllerTest.java         |  84 ++++++++----
 .../server/stageplanner/TestStagePlanner.java   |   6 +-
 .../server/upgrade/UpgradeCatalog170Test.java   |  30 +++++
 .../ambari/server/utils/TestStageUtils.java     |  13 +-
 .../app/controllers/wizard/step3_controller.js  |   1 -
 26 files changed, 381 insertions(+), 203 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
index b9a67b7..0385686 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
@@ -121,6 +121,8 @@ class ActionScheduler implements Runnable {
   private boolean activeAwakeRequest = false;
   //Cache for clusterHostinfo, key - stageId-requestId
   private Cache<String, Map<String, Set<String>>> clusterHostInfoCache;
+  private Cache<String, Map<String, String>> commandParamsStageCache;
+  private Cache<String, Map<String, String>> hostParamsStageCache;
 
   public ActionScheduler(long sleepTimeMilliSec, long actionTimeoutMilliSec,
       ActionDBAccessor db, ActionQueue actionQueue, Clusters fsmObject,
@@ -138,6 +140,12 @@ class ActionScheduler implements Runnable {
     this.clusterHostInfoCache = CacheBuilder.newBuilder().
         expireAfterAccess(5, TimeUnit.MINUTES).
         build();
+    this.commandParamsStageCache = CacheBuilder.newBuilder().
+      expireAfterAccess(5, TimeUnit.MINUTES).
+      build();
+    this.hostParamsStageCache = CacheBuilder.newBuilder().
+      expireAfterAccess(5, TimeUnit.MINUTES).
+      build();
     this.configuration = configuration;
   }
 
@@ -748,6 +756,31 @@ class ActionScheduler implements Runnable {
     }
 
     cmd.setClusterHostInfo(clusterHostInfo);
+ 
+    //Try to get commandParams from cache and merge them with command-level parameters
+    Map<String, String> commandParams = commandParamsStageCache.getIfPresent(stagePk);
+
+    if (commandParams == null){
+      Type type = new TypeToken<Map<String, String>>() {}.getType();
+      commandParams = StageUtils.getGson().fromJson(s.getCommandParamsStage(), type);
+      commandParamsStageCache.put(stagePk, commandParams);
+    }
+    Map<String, String> commandParamsCmd = cmd.getCommandParams();
+    commandParamsCmd.putAll(commandParams);
+    cmd.setCommandParams(commandParamsCmd);
+
+
+    //Try to get hostParams from cache and merge them with command-level parameters
+    Map<String, String> hostParams = hostParamsStageCache.getIfPresent(stagePk);
+    if (hostParams == null) {
+      Type type = new TypeToken<Map<String, String>>() {}.getType();
+      hostParams = StageUtils.getGson().fromJson(s.getHostParamsStage(), type);
+      hostParamsStageCache.put(stagePk, hostParams);
+    }
+    Map<String, String> hostParamsCmd = cmd.getHostLevelParams();
+    hostParamsCmd.putAll(hostParams);
+    cmd.setHostLevelParams(hostParamsCmd);
+
 
     commandsToUpdate.add(cmd);
   }
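
The change set moves stage-scoped command and host parameters out of every
ExecutionCommand and stores them once per stage as JSON; the scheduler merges them
back in here through short-lived Guava caches. The lookup-then-populate pattern in
isolation (hypothetical key and JSON variable, mirroring the code above):

  Cache<String, Map<String, String>> cache = CacheBuilder.newBuilder()
      .expireAfterAccess(5, TimeUnit.MINUTES)
      .build();

  String stagePk = "stageId-requestId";   // hypothetical key, following the cache comment above
  Map<String, String> params = cache.getIfPresent(stagePk);
  if (params == null) {
    Type type = new TypeToken<Map<String, String>>() {}.getType();
    params = StageUtils.getGson().fromJson(stageParamsJson, type);  // stageParamsJson: the stored JSON string
    cache.put(stagePk, params);
  }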

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java
index c4bbb46..bbc5ac3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java
@@ -39,7 +39,6 @@ import org.apache.ambari.server.orm.entities.StageEntity;
 import org.apache.ambari.server.serveraction.ServerAction;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ServiceComponentHostEvent;
-import org.apache.ambari.server.state.fsm.event.Event;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostUpgradeEvent;
 import org.apache.ambari.server.utils.StageUtils;
 import org.apache.commons.lang.StringUtils;
@@ -63,6 +62,8 @@ public class Stage {
   private final String logDir;
   private final String requestContext;
   private String clusterHostInfo;
+  private String commandParamsStage;
+  private String hostParamsStage;
 
   private int stageTimeout = -1;
 
@@ -83,7 +84,9 @@ public class Stage {
       @Assisted("clusterName") @Nullable String clusterName,
       @Assisted("clusterId") long clusterId,
       @Assisted("requestContext") @Nullable String requestContext,
-      @Assisted("clusterHostInfo") String clusterHostInfo) {
+      @Assisted("clusterHostInfo") String clusterHostInfo,
+      @Assisted("commandParamsStage") String commandParamsStage,
+      @Assisted("hostParamsStage") String hostParamsStage) {
     this.wrappersLoaded = true;
     this.requestId = requestId;
     this.logDir = logDir;
@@ -91,6 +94,8 @@ public class Stage {
     this.clusterId = clusterId;
     this.requestContext = requestContext == null ? "" : requestContext;
     this.clusterHostInfo = clusterHostInfo;
+    this.commandParamsStage = commandParamsStage;
+    this.hostParamsStage = hostParamsStage;
   }
 
   @AssistedInject
@@ -113,6 +118,8 @@ public class Stage {
     
     requestContext = stageEntity.getRequestContext();
     clusterHostInfo = stageEntity.getClusterHostInfo();
+    commandParamsStage = stageEntity.getCommandParamsStage();
+    hostParamsStage = stageEntity.getHostParamsStage();
 
 
     List<Long> taskIds = hostRoleCommandDAO.findTaskIdsByStage(requestId, stageId);
@@ -147,6 +154,8 @@ public class Stage {
     stageEntity.setHostRoleCommands(new ArrayList<HostRoleCommandEntity>());
     stageEntity.setRoleSuccessCriterias(new ArrayList<RoleSuccessCriteriaEntity>());
     stageEntity.setClusterHostInfo(clusterHostInfo);
+    stageEntity.setCommandParamsStage(commandParamsStage);
+    stageEntity.setHostParamsStage(hostParamsStage);
 
     for (Role role : successFactors.keySet()) {
       RoleSuccessCriteriaEntity roleSuccessCriteriaEntity = new RoleSuccessCriteriaEntity();
@@ -198,6 +207,23 @@ public class Stage {
   public void setClusterHostInfo(String clusterHostInfo) {
     this.clusterHostInfo = clusterHostInfo;
   }
+ 
+  public String getCommandParamsStage() {
+    return commandParamsStage;
+  }
+
+  public void setCommandParamsStage(String commandParamsStage) {
+    this.commandParamsStage = commandParamsStage;
+  }
+
+  public String getHostParamsStage() {
+    return hostParamsStage;
+  }
+
+  public void setHostParamsStage(String hostParamsStage) {
+    this.hostParamsStage = hostParamsStage;
+  }
+
 
   public synchronized void setStageId(long stageId) {
     if (this.stageId != -1) {
@@ -539,6 +565,8 @@ public class Stage {
     builder.append("logDir=" + logDir+"\n");
     builder.append("requestContext="+requestContext+"\n");
     builder.append("clusterHostInfo="+clusterHostInfo+"\n");
+    builder.append("commandParamsStage="+commandParamsStage+"\n");
+    builder.append("hostParamsStage="+hostParamsStage+"\n");
     builder.append("Success Factors:\n");
     for (Role r : successFactors.keySet()) {
       builder.append("  role: "+r+", factor: "+successFactors.get(r)+"\n");

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/StageFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/StageFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/StageFactory.java
index fab2e96..a88558c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/StageFactory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/StageFactory.java
@@ -29,7 +29,9 @@ public interface StageFactory {
       @Assisted("clusterName") String clusterName,
       @Assisted("clusterId") long clusterId,
       @Assisted("requestContext") String requestContext,
-      @Assisted("clusterHostInfo") String clusterHostInfo);
+      @Assisted("clusterHostInfo") String clusterHostInfo,
+      @Assisted("commandParamsStage") String commandParamsStage,
+      @Assisted("hostParamsStage") String hostParamsStage);
 
   Stage createExisting(StageEntity stageEntity);
 }
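
With the two extra assisted parameters, callers hand the factory the stage-scoped
parameter maps as JSON strings. A hedged sketch of the new call shape (variable names
illustrative, mirroring the controller changes later in this commit):

  String clusterHostInfoJson = StageUtils.getGson().toJson(clusterHostInfo);
  String hostParamsJson = StageUtils.getGson().toJson(createDefaultHostParams(cluster));

  Stage stage = stageFactory.createNew(requestId, logDir,
      cluster.getClusterName(), cluster.getClusterId(),
      requestContext, clusterHostInfoJson,
      "{}",             // commandParamsStage: empty for this request type
      hostParamsJson);  // hostParamsStage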

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
index fdf96df..33c5af4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
@@ -57,7 +57,7 @@ public class ExecutionCommand extends AgentCommand {
   private Map<String, Map<String, Map<String, String>>> configurationAttributes;
   private Map<String, Map<String, String>> configurationTags;
   private Set<String> forceRefreshConfigTags = new HashSet<String>();
-  private Map<String, String> commandParams;
+  private Map<String, String> commandParams = new HashMap<String, String>();
   private String serviceName;
   private String componentName;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
index 64f132e..efe85e0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
@@ -20,7 +20,6 @@ package org.apache.ambari.server.controller;
 
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMPONENT_CATEGORY;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_LOCATION;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE;
 
@@ -206,12 +205,10 @@ public class AmbariActionExecutionHelper {
    * Add tasks to the stage based on the requested action execution
    * @param actionContext the context associated with the action
    * @param stage stage into which tasks must be inserted
-   * @param hostLevelParams host level params to send with the command
    * @throws AmbariException
    */
   public void addExecutionCommandsToStage(
-          final ActionExecutionContext actionContext,
-          Stage stage, Map<String, String> hostLevelParams)
+          final ActionExecutionContext actionContext, Stage stage)
       throws AmbariException {
 
     String actionName = actionContext.getActionName();
@@ -337,9 +334,8 @@ public class AmbariActionExecutionHelper {
         configTags = managementController.findConfigurationTagsWithOverrides(cluster, hostName);
       }
 
-      Map<String, String> commandParams = actionContext.getParameters();
+      Map<String, String> commandParams = new TreeMap<String, String>();
       commandParams.put(COMMAND_TIMEOUT, actionContext.getTimeout().toString());
-      commandParams.put(JDK_LOCATION, managementController.getJdkResourceUrl());
       commandParams.put(SCRIPT, actionName + ".py");
       commandParams.put(SCRIPT_TYPE, TYPE_PYTHON);
 
@@ -353,7 +349,6 @@ public class AmbariActionExecutionHelper {
       execCmd.setConfigurations(configurations);
       execCmd.setConfigurationAttributes(configurationAttributes);
       execCmd.setConfigurationTags(configTags);
-      execCmd.setHostLevelParams(hostLevelParams);
       execCmd.setCommandParams(commandParams);
       execCmd.setServiceName(serviceName == null || serviceName.isEmpty() ?
         resourceFilter.getServiceName() : serviceName);

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
index 339194f..156427d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
@@ -22,7 +22,6 @@ import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_T
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMPONENT_CATEGORY;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.CUSTOM_COMMAND;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_LOCATION;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.REPO_INFO;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE;
@@ -204,7 +203,7 @@ public class AmbariCustomCommandExecutionHelper {
 
   private void addCustomCommandAction(final ActionExecutionContext actionExecutionContext,
                                       final RequestResourceFilter resourceFilter,
-                                      Stage stage, Map<String, String> hostLevelParams,
+                                      Stage stage,
                                       Map<String, String> additionalCommandParams,
                                       String commandDetail)
                                       throws AmbariException {
@@ -290,6 +289,8 @@ public class AmbariCustomCommandExecutionHelper {
       if(actionExecutionContext.getParameters() != null && actionExecutionContext.getParameters().containsKey(KeyNames.REFRESH_ADITIONAL_COMPONENT_TAGS)){
         execCmd.setForceRefreshConfigTags(parseAndValidateComponentsMapping(actionExecutionContext.getParameters().get(KeyNames.REFRESH_ADITIONAL_COMPONENT_TAGS)));
       }
+ 
+      Map<String, String> hostLevelParams = new TreeMap<String, String>();
 
       hostLevelParams.put(CUSTOM_COMMAND, commandName);
       // Set parameters required for re-installing clients on restart
@@ -330,7 +331,6 @@ public class AmbariCustomCommandExecutionHelper {
       }
 
       commandParams.put(COMMAND_TIMEOUT, commandTimeout);
-      commandParams.put(JDK_LOCATION, managementController.getJdkResourceUrl());
 
       commandParams.put(SERVICE_PACKAGE_FOLDER,
           serviceInfo.getServicePackageFolder());
@@ -357,7 +357,7 @@ public class AmbariCustomCommandExecutionHelper {
 
   /**
    * splits the passed commaseparated value and returns it as set
-   * @param comma separated list
+   * @param commaSeparatedTags separated list
    * @return set of items or null
    * @throws AmbariException
    */
@@ -372,7 +372,7 @@ public class AmbariCustomCommandExecutionHelper {
   private void findHostAndAddServiceCheckAction(
           final ActionExecutionContext actionExecutionContext,
           final RequestResourceFilter resourceFilter,
-          Stage stage, Map<String, String> hostLevelParams)
+          Stage stage)
           throws AmbariException {
 
     String clusterName = actionExecutionContext.getClusterName();
@@ -438,8 +438,7 @@ public class AmbariCustomCommandExecutionHelper {
     }
 
     addServiceCheckAction(stage, hostName, smokeTestRole, nowTimestamp,
-        serviceName, componentName, actionParameters,
-        hostLevelParams);
+        serviceName, componentName, actionParameters);
   }
 
   /**
@@ -452,8 +451,7 @@ public class AmbariCustomCommandExecutionHelper {
                                     long nowTimestamp,
                                     String serviceName,
                                     String componentName,
-                                    Map<String, String> actionParameters,
-                                    Map<String, String> hostLevelParams)
+                                    Map<String, String> actionParameters)
                                     throws AmbariException {
 
     String clusterName = stage.getClusterName();
@@ -496,11 +494,6 @@ public class AmbariCustomCommandExecutionHelper {
     execCmd.setClusterHostInfo(
         StageUtils.getClusterHostInfo(clusters.getHostsForCluster(clusterName), cluster));
 
-    if (hostLevelParams == null) {
-      hostLevelParams = new TreeMap<String, String>();
-    }
-    execCmd.setHostLevelParams(hostLevelParams);
-
     Map<String, String> commandParams = new TreeMap<String, String>();
 
     String commandTimeout = configs.getDefaultAgentTaskTimeout();
@@ -525,7 +518,6 @@ public class AmbariCustomCommandExecutionHelper {
     }
 
     commandParams.put(COMMAND_TIMEOUT, commandTimeout);
-    commandParams.put(JDK_LOCATION, managementController.getJdkResourceUrl());
 
     commandParams.put(SERVICE_PACKAGE_FOLDER,
         serviceInfo.getServicePackageFolder());
@@ -557,7 +549,7 @@ public class AmbariCustomCommandExecutionHelper {
    */
   private void addDecommissionAction(final ActionExecutionContext actionExecutionContext,
                                      final RequestResourceFilter resourceFilter,
-                                     Stage stage, Map<String, String> hostLevelParams)
+                                     Stage stage)
                                      throws AmbariException {
 
     String clusterName = actionExecutionContext.getClusterName();
@@ -741,7 +733,7 @@ public class AmbariCustomCommandExecutionHelper {
       if (!serviceName.equals(Service.Type.HBASE.name()) || hostName.equals(primaryCandidate)) {
         commandParams.put(UPDATE_EXCLUDE_FILE_ONLY, "false");
         addCustomCommandAction(commandContext, commandFilter, stage,
-          hostLevelParams, commandParams, commandDetail.toString());
+          commandParams, commandDetail.toString());
       }
     }
   }
@@ -800,12 +792,10 @@ public class AmbariCustomCommandExecutionHelper {
    * Other than Service_Check and Decommission all other commands are pass-through
    * @param actionExecutionContext received request to execute a command
    * @param stage the initial stage for task creation
-   * @param hostLevelParams specific parameters for the hosts
    * @throws AmbariException
    */
   public void addExecutionCommandsToStage(ActionExecutionContext actionExecutionContext,
                                           Stage stage,
-                                          Map<String, String> hostLevelParams,
                                           Map<String, String> requestParams)
                                           throws AmbariException {
 
@@ -819,9 +809,9 @@ public class AmbariCustomCommandExecutionHelper {
 
       if (actionExecutionContext.getActionName().contains(SERVICE_CHECK_COMMAND_NAME)) {
         findHostAndAddServiceCheckAction(actionExecutionContext,
-          resourceFilter, stage, hostLevelParams);
+          resourceFilter, stage);
       } else if (actionExecutionContext.getActionName().equals(DECOMMISSION_COMMAND_NAME)) {
-        addDecommissionAction(actionExecutionContext, resourceFilter, stage, hostLevelParams);
+        addDecommissionAction(actionExecutionContext, resourceFilter, stage);
       } else if (isValidCustomCommand(actionExecutionContext, resourceFilter)) {
         String commandDetail = getReadableCustomCommandDetail(actionExecutionContext, resourceFilter);
 
@@ -838,7 +828,7 @@ public class AmbariCustomCommandExecutionHelper {
           actionExecutionContext.getParameters().put(KeyNames.REFRESH_ADITIONAL_COMPONENT_TAGS, requestParams.get(KeyNames.REFRESH_ADITIONAL_COMPONENT_TAGS));
         }
         addCustomCommandAction(actionExecutionContext, resourceFilter, stage,
-          hostLevelParams, extraParams, commandDetail);
+          extraParams, commandDetail);
       } else {
         throw new AmbariException("Unsupported action " +
           actionExecutionContext.getActionName());

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index 3b80515..c465189 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -18,13 +18,48 @@
 
 package org.apache.ambari.server.controller;
 
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
-import com.google.gson.Gson;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
-import com.google.inject.Singleton;
-import com.google.inject.persist.Transactional;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_DRIVER;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_PASSWORD;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_URL;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_USERNAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_DRIVER_FILENAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_NAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JAVA_HOME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JCE_NAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_LOCATION;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_NAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.MYSQL_JDBC_URL;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.ORACLE_JDBC_URL;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.PACKAGE_LIST;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.REPO_INFO;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_PACKAGE_FOLDER;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_REPO_INFO;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_NAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_VERSION;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.text.MessageFormat;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumMap;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.concurrent.TimeUnit;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.ClusterNotFoundException;
 import org.apache.ambari.server.DuplicateResourceException;
@@ -101,46 +136,14 @@ import org.apache.commons.lang.math.NumberUtils;
 import org.apache.http.client.utils.URIBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import java.io.File;
-import java.io.IOException;
-import java.net.InetAddress;
-import java.text.MessageFormat;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.EnumMap;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.TreeMap;
-import java.util.concurrent.TimeUnit;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_DRIVER;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_PASSWORD;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_URL;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_USERNAME;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_DRIVER_FILENAME;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_NAME;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JAVA_HOME;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JCE_NAME;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_LOCATION;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_NAME;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.MYSQL_JDBC_URL;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.ORACLE_JDBC_URL;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.PACKAGE_LIST;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.REPO_INFO;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_PACKAGE_FOLDER;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_REPO_INFO;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_NAME;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_VERSION;
+
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
+import com.google.gson.Gson;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import com.google.inject.Singleton;
+import com.google.inject.persist.Transactional;
 
 @Singleton
 public class AmbariManagementControllerImpl implements AmbariManagementController {
@@ -798,13 +801,16 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     }
   }
 
-  private Stage createNewStage(long id, Cluster cluster, long requestId, String requestContext, String clusterHostInfo) {
+  private Stage createNewStage(long id, Cluster cluster, long requestId,
+                               String requestContext, String clusterHostInfo,
+                               String commandParamsStage, String hostParamsStage) {
     String logDir = BASE_LOG_DIR + File.pathSeparator + requestId;
     Stage stage =
         stageFactory.createNew(requestId, logDir,
             null == cluster ? null : cluster.getClusterName(),
             null == cluster ? -1L : cluster.getClusterId(),
-            requestContext, clusterHostInfo);
+            requestContext, clusterHostInfo, commandParamsStage,
+            hostParamsStage);
     stage.setStageId(id);
     return stage;
   }
@@ -1582,7 +1588,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
         + ", repoInfo=" + repoInfo);
     }
 
-    Map<String, String> hostParams = createDefaultHostParams(cluster);
+    Map<String, String> hostParams = new TreeMap<String, String>();
     hostParams.put(REPO_INFO, repoInfo);
     hostParams.putAll(getRcaParameters());
 
@@ -1731,9 +1737,11 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
           clusters.getHostsForCluster(cluster.getClusterName()), cluster);
 
       String clusterHostInfoJson = StageUtils.getGson().toJson(clusterHostInfo);
+      String HostParamsJson = StageUtils.getGson().toJson(createDefaultHostParams(cluster));
 
       Stage stage = createNewStage(requestStages.getLastStageId() + 1, cluster,
-          requestStages.getId(), requestProperties.get(REQUEST_CONTEXT_PROPERTY), clusterHostInfoJson);
+          requestStages.getId(), requestProperties.get(REQUEST_CONTEXT_PROPERTY),
+          clusterHostInfoJson, "{}", HostParamsJson);
 
       //HACK
       String jobtrackerHost = getJobTrackerHost(cluster);
@@ -1945,7 +1953,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
 
         customCommandExecutionHelper.addServiceCheckAction(stage, clientHost,
           smokeTestRole, nowTimestamp, serviceName,
-          null, null, createDefaultHostParams(cluster));
+          null, null);
       }
 
       RoleCommandOrder rco = getRoleCommandOrder(cluster);
@@ -2947,24 +2955,28 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
       actionExecutionHelper.validateAction(actionRequest);
     }
 
-    Map<String, String> params = new HashMap<String, String>();
+    Map<String, String> commandParamsStage = StageUtils.getCommandParamsStage(actionExecContext);
+    Map<String, String> hostParamsStage = new HashMap<String, String>();
     Map<String, Set<String>> clusterHostInfo;
     String clusterHostInfoJson = "{}";
 
     if (null != cluster) {
       clusterHostInfo = StageUtils.getClusterHostInfo(
         clusters.getHostsForCluster(cluster.getClusterName()), cluster);
-      params = createDefaultHostParams(cluster);
+      hostParamsStage = createDefaultHostParams(cluster);
       clusterHostInfoJson = StageUtils.getGson().toJson(clusterHostInfo);
     }
 
-    Stage stage = createNewStage(0, cluster, actionManager.getNextRequestId(), requestContext, clusterHostInfoJson);
+    String hostParamsStageJson = StageUtils.getGson().toJson(hostParamsStage);
+    String commandParamsStageJson = StageUtils.getGson().toJson(commandParamsStage);
+
+    Stage stage = createNewStage(0, cluster, actionManager.getNextRequestId(), requestContext,
+      clusterHostInfoJson, commandParamsStageJson, hostParamsStageJson);
 
     if (actionRequest.isCommand()) {
-      customCommandExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage,
-          params, requestProperties);
+      customCommandExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage, requestProperties);
     } else {
-      actionExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage, params);
+      actionExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage);
     }
 
     RoleGraph rg;
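
Taken together, the controller hunks above move the stage-level parameter maps off the per-command host level params and onto the Stage itself: createNewStage() now takes a commandParamsStage and a hostParamsStage JSON string next to the cluster host info, and the custom command / action helpers no longer receive the params map per call. The following is a condensed sketch of the resulting call pattern, not part of the commit; it is a fragment meant to be read inside AmbariManagementControllerImpl and uses only names that appear in the hunks above.

    // Serialize the stage-level maps once with Gson and attach them to the Stage,
    // instead of copying them into every ExecutionCommand.
    Map<String, String> hostParamsStage = createDefaultHostParams(cluster);
    Map<String, String> commandParamsStage = StageUtils.getCommandParamsStage(actionExecContext);

    String hostParamsStageJson = StageUtils.getGson().toJson(hostParamsStage);
    String commandParamsStageJson = StageUtils.getGson().toJson(commandParamsStage);

    Stage stage = createNewStage(0, cluster, actionManager.getNextRequestId(), requestContext,
        clusterHostInfoJson, commandParamsStageJson, hostParamsStageJson);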

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/StageEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/StageEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/StageEntity.java
index 245c9e9..a7bc948 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/StageEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/StageEntity.java
@@ -51,6 +51,14 @@ public class StageEntity {
   @Column(name = "cluster_host_info")
   @Basic
   private byte[] clusterHostInfo;
+ 
+  @Column(name = "command_params")
+  @Basic
+  private byte[] commandParamsStage;
+
+  @Column(name = "host_params")
+  @Basic
+  private byte[] hostParamsStage;
 
   @ManyToOne
   @JoinColumn(name = "request_id", referencedColumnName = "request_id", nullable = false)
@@ -106,6 +114,22 @@ public class StageEntity {
   public void setClusterHostInfo(String clusterHostInfo) {
     this.clusterHostInfo = clusterHostInfo.getBytes();
   }
+ 
+  public String getCommandParamsStage() {
+    return commandParamsStage == null ? new String() : new String(commandParamsStage);
+  }
+
+  public void setCommandParamsStage(String commandParamsStage) {
+    this.commandParamsStage = commandParamsStage.getBytes();
+  }
+
+  public String getHostParamsStage() {
+    return hostParamsStage == null ? new String() : new String(hostParamsStage);
+  }
+
+  public void setHostParamsStage(String hostParamsStage) {
+    this.hostParamsStage = hostParamsStage.getBytes();
+  }
 
   public void setRequestContext(String requestContext) {
     if (requestContext != null) {
@@ -125,6 +149,8 @@ public class StageEntity {
     if (requestId != null ? !requestId.equals(that.requestId) : that.requestId != null) return false;
     if (stageId != null ? !stageId.equals(that.stageId) : that.stageId != null) return false;
     if (clusterHostInfo != null ? !clusterHostInfo.equals(that.clusterHostInfo) : that.clusterHostInfo != null) return false;
+    if (commandParamsStage != null ? !commandParamsStage.equals(that.commandParamsStage) : that.commandParamsStage != null) return false;
+    if (hostParamsStage != null ? !hostParamsStage.equals(that.hostParamsStage) : that.hostParamsStage != null) return false;
     return !(requestContext != null ? !requestContext.equals(that.requestContext) : that.requestContext != null);
 
   }
@@ -136,6 +162,8 @@ public class StageEntity {
     result = 31 * result + (stageId != null ? stageId.hashCode() : 0);
     result = 31 * result + (logInfo != null ? logInfo.hashCode() : 0);
     result = 31 * result + (clusterHostInfo != null ? clusterHostInfo.hashCode() : 0);
+    result = 31 * result + (commandParamsStage != null ? commandParamsStage.hashCode() : 0);
+    result = 31 * result + (hostParamsStage != null ? hostParamsStage.hashCode() : 0);
     result = 31 * result + (requestContext != null ? requestContext.hashCode() : 0);
     return result;
   }
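
The StageEntity hunks above persist the two new maps as raw byte[] columns (command_params, host_params) while exposing plain String accessors; an unset column is returned as an empty string rather than null. A minimal illustration, not part of the commit and assuming the entity's default no-arg constructor:

    // Store and read the stage-level parameter JSON through the new entity fields.
    StageEntity entity = new StageEntity();
    entity.setCommandParamsStage("{\"test\":\"value\"}");   // kept internally as bytes
    entity.setHostParamsStage("{}");

    String commandParams = entity.getCommandParamsStage();  // "{\"test\":\"value\"}"
    String hostParams = entity.getHostParamsStage();        // "{}"; a null column comes back as ""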

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/main/java/org/apache/ambari/server/stageplanner/RoleGraph.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stageplanner/RoleGraph.java b/ambari-server/src/main/java/org/apache/ambari/server/stageplanner/RoleGraph.java
index 913b878..4fe3787 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stageplanner/RoleGraph.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stageplanner/RoleGraph.java
@@ -139,7 +139,8 @@ public class RoleGraph {
     Stage newStage = new Stage(origStage.getRequestId(),
         origStage.getLogDir(), origStage.getClusterName(),
         origStage.getClusterId(),
-        origStage.getRequestContext(), origStage.getClusterHostInfo());
+        origStage.getRequestContext(), origStage.getClusterHostInfo(),
+        origStage.getCommandParamsStage(), origStage.getHostParamsStage());
     newStage.setSuccessFactors(origStage.getSuccessFactors());
     for (RoleGraphNode rgn : stageGraphNodes) {
       for (String host : rgn.getHosts()) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
index 30059ac..dc3d5b8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
@@ -73,7 +73,6 @@ import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.view.configuration.InstanceConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -249,6 +248,12 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
 
     dbAccessor.addColumn("host_role_command", new DBColumnInfo("output_log",
         String.class, 255, null, true));
+
+    dbAccessor.addColumn("stage", new DBColumnInfo("command_params",
+      byte[].class, null, null, true));
+    dbAccessor.addColumn("stage", new DBColumnInfo("host_params",
+      byte[].class, null, null, true));
+
     dbAccessor.addColumn("host_role_command", new DBColumnInfo("error_log",
         String.class, 255, null, true));
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java b/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java
index 7160bcf..2a13653 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java
@@ -17,14 +17,32 @@
  */
 package org.apache.ambari.server.utils;
 
-import com.google.common.base.Joiner;
-import com.google.gson.Gson;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
+import javax.xml.bind.JAXBException;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.Role;
 import org.apache.ambari.server.RoleCommand;
 import org.apache.ambari.server.actionmanager.Stage;
 import org.apache.ambari.server.agent.ExecutionCommand;
-import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.controller.ActionExecutionContext;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.HostComponentAdminState;
@@ -40,25 +58,8 @@ import org.codehaus.jackson.map.JsonMappingException;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.map.SerializationConfig;
 
-import javax.xml.bind.JAXBException;
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.SortedSet;
-import java.util.TreeMap;
-import java.util.TreeSet;
+import com.google.common.base.Joiner;
+import com.google.gson.Gson;
 
 public class StageUtils {
 
@@ -148,20 +149,20 @@ public class StageUtils {
     return requestStageIds;
   }
 
-  public static Stage getATestStage(long requestId, long stageId, String clusterHostInfo) {
+  public static Stage getATestStage(long requestId, long stageId, String clusterHostInfo, String commandParamsStage, String hostParamsStage) {
     String hostname;
     try {
       hostname = InetAddress.getLocalHost().getHostName();
     } catch (UnknownHostException e) {
       hostname = "host-dummy";
     }
-    return getATestStage(requestId, stageId, hostname, clusterHostInfo);
+    return getATestStage(requestId, stageId, hostname, clusterHostInfo, commandParamsStage, hostParamsStage);
   }
 
   //For testing only
-  public static Stage getATestStage(long requestId, long stageId, String hostname, String clusterHostInfo) {
+  public static Stage getATestStage(long requestId, long stageId, String hostname, String clusterHostInfo, String commandParamsStage, String hostParamsStage) {
 
-    Stage s = new Stage(requestId, "/tmp", "cluster1", 1L, "context", clusterHostInfo);
+    Stage s = new Stage(requestId, "/tmp", "cluster1", 1L, "context", clusterHostInfo, commandParamsStage, hostParamsStage);
     s.setStageId(stageId);
     long now = System.currentTimeMillis();
     s.addHostRoleExecutionCommand(hostname, Role.NAMENODE, RoleCommand.INSTALL,
@@ -221,6 +222,10 @@ public class StageUtils {
     InputStream is = new ByteArrayInputStream(json.getBytes(Charset.forName("UTF8")));
     return mapper.readValue(is, clazz);
   }
+ 
+  public static Map<String, String> getCommandParamsStage(ActionExecutionContext actionExecContext) throws AmbariException {
+    return actionExecContext.getParameters() != null ? actionExecContext.getParameters() : new TreeMap<String, String>();
+  }
 
   public static Map<String, Set<String>> getClusterHostInfo(
       Map<String, Host> allHosts, Cluster cluster) throws AmbariException {
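
The StageUtils changes widen the test helper getATestStage() so the two new JSON strings flow through to the Stage constructor, and add getCommandParamsStage(), which falls back to an empty TreeMap when the ActionExecutionContext carries no parameters. Below is a short usage sketch, not part of the commit; the JSON literals and the CLUSTER_HOST_INFO constant are placeholders mirroring the test hunks further down, and the params are read back the way the updated controller tests do (Gson plus com.google.gson.reflect.TypeToken with java.lang.reflect.Type).

    // Build a test stage with the widened helper, then deserialize its stage-level params.
    Stage s = StageUtils.getATestStage(1, 977, "host-dummy", CLUSTER_HOST_INFO,
        "{\"host_param\":\"param_value\"}", "{\"stage_param\":\"param_value\"}");

    Type type = new TypeToken<Map<String, String>>() {}.getType();
    Map<String, String> commandParams =
        StageUtils.getGson().fromJson(s.getCommandParamsStage(), type);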

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 29fa041..2d83aeb 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -45,7 +45,7 @@ CREATE TABLE members (member_id INTEGER, group_id INTEGER NOT NULL, user_id INTE
 CREATE TABLE execution_command (task_id BIGINT NOT NULL, command LONGBLOB, PRIMARY KEY (task_id));
 CREATE TABLE host_role_command (task_id BIGINT NOT NULL, attempt_count SMALLINT NOT NULL, event LONGTEXT NOT NULL, exitcode INTEGER NOT NULL, host_name VARCHAR(255) NOT NULL, last_attempt_time BIGINT NOT NULL, request_id BIGINT NOT NULL, role VARCHAR(255), role_command VARCHAR(255), stage_id BIGINT NOT NULL, start_time BIGINT NOT NULL, end_time BIGINT, status VARCHAR(255), std_error LONGBLOB, std_out LONGBLOB, output_log VARCHAR(255) NULL, error_log VARCHAR(255) NULL, structured_out LONGBLOB, command_detail VARCHAR(255), custom_command_name VARCHAR(255), PRIMARY KEY (task_id));
 CREATE TABLE role_success_criteria (role VARCHAR(255) NOT NULL, request_id BIGINT NOT NULL, stage_id BIGINT NOT NULL, success_factor DOUBLE NOT NULL, PRIMARY KEY (role, request_id, stage_id));
-CREATE TABLE stage (stage_id BIGINT NOT NULL, request_id BIGINT NOT NULL, cluster_id BIGINT, log_info VARCHAR(255) NOT NULL, request_context VARCHAR(255), cluster_host_info LONGBLOB, PRIMARY KEY (stage_id, request_id));
+CREATE TABLE stage (stage_id BIGINT NOT NULL, request_id BIGINT NOT NULL, cluster_id BIGINT, log_info VARCHAR(255) NOT NULL, request_context VARCHAR(255), cluster_host_info LONGBLOB, command_params LONGBLOB, host_params LONGBLOB, PRIMARY KEY (stage_id, request_id));
 CREATE TABLE request (request_id BIGINT NOT NULL, cluster_id BIGINT, request_schedule_id BIGINT, command_name VARCHAR(255), create_time BIGINT NOT NULL, end_time BIGINT NOT NULL, inputs LONGBLOB, request_context VARCHAR(255), request_type VARCHAR(255), start_time BIGINT NOT NULL, status VARCHAR(255), PRIMARY KEY (request_id));
 CREATE TABLE requestresourcefilter (filter_id BIGINT NOT NULL, request_id BIGINT NOT NULL, service_name VARCHAR(255), component_name VARCHAR(255), hosts LONGBLOB, PRIMARY KEY (filter_id));
 CREATE TABLE requestoperationlevel (operation_level_id BIGINT NOT NULL, request_id BIGINT NOT NULL, level_name VARCHAR(255), cluster_name VARCHAR(255), service_name VARCHAR(255), host_component_name VARCHAR(255), host_name VARCHAR(255), PRIMARY KEY (operation_level_id));

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 457404d..241bb2b 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -36,7 +36,7 @@ CREATE TABLE members (member_id NUMBER(10), group_id NUMBER(10) NOT NULL, user_i
 CREATE TABLE execution_command (task_id NUMBER(19) NOT NULL, command BLOB NULL, PRIMARY KEY (task_id));
 CREATE TABLE host_role_command (task_id NUMBER(19) NOT NULL, attempt_count NUMBER(5) NOT NULL, event CLOB NULL, exitcode NUMBER(10) NOT NULL, host_name VARCHAR2(255) NOT NULL, last_attempt_time NUMBER(19) NOT NULL, request_id NUMBER(19) NOT NULL, role VARCHAR2(255) NULL, role_command VARCHAR2(255) NULL, stage_id NUMBER(19) NOT NULL, start_time NUMBER(19) NOT NULL, end_time NUMBER(19), status VARCHAR2(255) NULL, std_error BLOB NULL, std_out BLOB NULL, output_log VARCHAR2(255) NULL, error_log VARCHAR2(255) NULL, structured_out BLOB NULL,  command_detail VARCHAR2(255) NULL, custom_command_name VARCHAR2(255) NULL, PRIMARY KEY (task_id));
 CREATE TABLE role_success_criteria (role VARCHAR2(255) NOT NULL, request_id NUMBER(19) NOT NULL, stage_id NUMBER(19) NOT NULL, success_factor NUMBER(19,4) NOT NULL, PRIMARY KEY (role, request_id, stage_id));
-CREATE TABLE stage (stage_id NUMBER(19) NOT NULL, request_id NUMBER(19) NOT NULL, cluster_id NUMBER(19) NULL, log_info VARCHAR2(255) NULL, request_context VARCHAR2(255) NULL, cluster_host_info BLOB NOT NULL, PRIMARY KEY (stage_id, request_id));
+CREATE TABLE stage (stage_id NUMBER(19) NOT NULL, request_id NUMBER(19) NOT NULL, cluster_id NUMBER(19) NULL, log_info VARCHAR2(255) NULL, request_context VARCHAR2(255) NULL, cluster_host_info BLOB NOT NULL, command_params BLOB, host_params BLOB, PRIMARY KEY (stage_id, request_id));
 CREATE TABLE request (request_id NUMBER(19) NOT NULL, cluster_id NUMBER(19), request_schedule_id NUMBER(19), command_name VARCHAR(255), create_time NUMBER(19) NOT NULL, end_time NUMBER(19) NOT NULL, inputs BLOB, request_context VARCHAR(255), request_type VARCHAR(255), start_time NUMBER(19) NOT NULL, status VARCHAR(255), PRIMARY KEY (request_id));
 CREATE TABLE requestresourcefilter (filter_id NUMBER(19) NOT NULL, request_id NUMBER(19) NOT NULL, service_name VARCHAR2(255), component_name VARCHAR2(255), hosts BLOB, PRIMARY KEY (filter_id));
 CREATE TABLE requestoperationlevel (operation_level_id NUMBER(19) NOT NULL, request_id NUMBER(19) NOT NULL, level_name VARCHAR2(255), cluster_name VARCHAR2(255), service_name VARCHAR2(255), host_component_name VARCHAR2(255), host_name VARCHAR2(255), PRIMARY KEY (operation_level_id));

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index b58fd21..ddea71c 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -57,7 +57,7 @@ CREATE TABLE host_role_command (task_id BIGINT NOT NULL, attempt_count SMALLINT
 
 CREATE TABLE role_success_criteria (role VARCHAR(255) NOT NULL, request_id BIGINT NOT NULL, stage_id BIGINT NOT NULL, success_factor FLOAT NOT NULL, PRIMARY KEY (role, request_id, stage_id));
 
-CREATE TABLE stage (stage_id BIGINT NOT NULL, request_id BIGINT NOT NULL, cluster_id BIGINT NOT NULL, log_info VARCHAR(255) NOT NULL, request_context VARCHAR(255), cluster_host_info BYTEA NOT NULL, PRIMARY KEY (stage_id, request_id));
+CREATE TABLE stage (stage_id BIGINT NOT NULL, request_id BIGINT NOT NULL, cluster_id BIGINT NOT NULL, log_info VARCHAR(255) NOT NULL, request_context VARCHAR(255), cluster_host_info BYTEA NOT NULL, command_params BYTEA, host_params BYTEA, PRIMARY KEY (stage_id, request_id));
 
 CREATE TABLE request (request_id BIGINT NOT NULL, cluster_id BIGINT, command_name VARCHAR(255), create_time BIGINT NOT NULL, end_time BIGINT NOT NULL, inputs BYTEA, request_context VARCHAR(255), request_type VARCHAR(255), request_schedule_id BIGINT, start_time BIGINT NOT NULL, status VARCHAR(255), PRIMARY KEY (request_id));
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
index 664bbe9..17b95c2 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
@@ -88,7 +88,7 @@ GRANT ALL PRIVILEGES ON TABLE ambari.host_role_command TO :username;
 CREATE TABLE ambari.role_success_criteria (role VARCHAR(255) NOT NULL, request_id BIGINT NOT NULL, stage_id BIGINT NOT NULL, success_factor FLOAT NOT NULL, PRIMARY KEY (role, request_id, stage_id));
 GRANT ALL PRIVILEGES ON TABLE ambari.role_success_criteria TO :username;
 
-CREATE TABLE ambari.stage (stage_id BIGINT NOT NULL, request_id BIGINT NOT NULL, cluster_id BIGINT NOT NULL, log_info VARCHAR(255) NOT NULL, request_context VARCHAR(255), cluster_host_info BYTEA NOT NULL, PRIMARY KEY (stage_id, request_id));
+CREATE TABLE ambari.stage (stage_id BIGINT NOT NULL, request_id BIGINT NOT NULL, cluster_id BIGINT NOT NULL, log_info VARCHAR(255) NOT NULL, request_context VARCHAR(255), cluster_host_info BYTEA NOT NULL, command_params BYTEA, host_params BYTEA, PRIMARY KEY (stage_id, request_id));
 GRANT ALL PRIVILEGES ON TABLE ambari.stage TO :username;
 
 CREATE TABLE ambari.request (request_id BIGINT NOT NULL, cluster_id BIGINT, command_name VARCHAR(255), create_time BIGINT NOT NULL, end_time BIGINT NOT NULL, inputs BYTEA, request_context VARCHAR(255), request_type VARCHAR(255), request_schedule_id BIGINT, start_time BIGINT NOT NULL, status VARCHAR(255), PRIMARY KEY (request_id));

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
index 034ac5b..948f137 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
@@ -155,7 +155,7 @@ public class ExecutionCommandWrapperTest {
   
   private static void createTask(ActionDBAccessor db, long requestId, long stageId, String hostName, String clusterName) throws AmbariException {
     
-    Stage s = new Stage(requestId, "/var/log", clusterName, 1L, "execution command wrapper test", "clusterHostInfo");
+    Stage s = new Stage(requestId, "/var/log", clusterName, 1L, "execution command wrapper test", "clusterHostInfo", "commandParamsStage", "hostParamsStage");
     s.setStageId(stageId);
     s.addHostRoleExecutionCommand(hostName, Role.NAMENODE,
         RoleCommand.START,

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
index 2850897..6d4f056 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
@@ -16,15 +16,15 @@
  * limitations under the License.
  */
 package org.apache.ambari.server.actionmanager;
+ 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
-import com.google.inject.persist.UnitOfWork;
-
-import junit.framework.Assert;
-
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.Role;
 import org.apache.ambari.server.RoleCommand;
@@ -52,8 +52,9 @@ import com.google.inject.Guice;
 import com.google.inject.Inject;
 import com.google.inject.Injector;
 import com.google.inject.persist.PersistService;
+import com.google.inject.persist.UnitOfWork;
 
-import static org.junit.Assert.*;
+import junit.framework.Assert;
 
 public class TestActionDBAccessorImpl {
   private static final Logger log = LoggerFactory.getLogger(TestActionDBAccessorImpl.class);
@@ -348,7 +349,8 @@ public class TestActionDBAccessorImpl {
 
   @Test
   public void testAbortRequest() throws AmbariException {
-    Stage s = new Stage(requestId, "/a/b", "cluster1", 1L, "action db accessor test", "clusterHostInfo");
+    Stage s = new Stage(requestId, "/a/b", "cluster1", 1L, "action db accessor test",
+      "clusterHostInfo", "commandParamsStage", "hostParamsStage");
     s.setStageId(stageId);
 
     clusters.addHost("host2");
@@ -412,7 +414,8 @@ public class TestActionDBAccessorImpl {
   }
 
   private Stage createStubStage(String hostname, long requestId, long stageId) {
-    Stage s = new Stage(requestId, "/a/b", "cluster1", 1L, "action db accessor test", "clusterHostInfo");
+    Stage s = new Stage(requestId, "/a/b", "cluster1", 1L, "action db accessor test",
+      "clusterHostInfo", "commandParamsStage", "hostParamsStage");
     s.setStageId(stageId);
     s.addHostRoleExecutionCommand(hostname, Role.HBASE_MASTER,
         RoleCommand.START,
@@ -429,7 +432,8 @@ public class TestActionDBAccessorImpl {
 
   private void populateActionDBWithCustomAction(ActionDBAccessor db, String hostname,
                                 long requestId, long stageId) throws AmbariException {
-    Stage s = new Stage(requestId, "/a/b", "cluster1", 1L, "action db accessor test", "");
+    Stage s = new Stage(requestId, "/a/b", "cluster1", 1L, "action db accessor test",
+      "", "commandParamsStage", "hostParamsStage");
     s.setStageId(stageId);
     s.addHostRoleExecutionCommand(hostname, Role.valueOf(actionName),
         RoleCommand.ACTIONEXECUTE,

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionManager.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionManager.java
index 39bc762..5a2c467 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionManager.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionManager.java
@@ -17,17 +17,24 @@
  */
 package org.apache.ambari.server.actionmanager;
 
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.persist.PersistService;
-import com.google.inject.persist.UnitOfWork;
-import junit.framework.Assert;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertSame;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.Role;
 import org.apache.ambari.server.RoleCommand;
 import org.apache.ambari.server.agent.ActionQueue;
 import org.apache.ambari.server.agent.CommandReport;
-import org.apache.ambari.server.api.services.BaseRequest;
 import org.apache.ambari.server.controller.HostsMap;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
@@ -39,12 +46,12 @@ import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.persist.PersistService;
+import com.google.inject.persist.UnitOfWork;
 
-import static org.easymock.EasyMock.*;
-import static org.junit.Assert.*;
+import junit.framework.Assert;
 
 public class TestActionManager {
 
@@ -160,7 +167,7 @@ public class TestActionManager {
   }
 
   private void populateActionDB(ActionDBAccessor db, String hostname) throws AmbariException {
-    Stage s = new Stage(requestId, "/a/b", "cluster1", 1L, "action manager test", "clusterHostInfo");
+    Stage s = new Stage(requestId, "/a/b", "cluster1", 1L, "action manager test", "clusterHostInfo", "commandParamsStage", "hostParamsStage");
     s.setStageId(stageId);
     s.addHostRoleExecutionCommand(hostname, Role.HBASE_MASTER,
         RoleCommand.START,

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
index a536bef..c4a88cf 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
@@ -127,7 +127,8 @@ public class TestActionScheduler {
 
     ActionDBAccessor db = mock(ActionDBAccessorImpl.class);
     List<Stage> stages = new ArrayList<Stage>();
-    Stage s = StageUtils.getATestStage(1, 977, hostname, CLUSTER_HOST_INFO);
+    Stage s = StageUtils.getATestStage(1, 977, hostname, CLUSTER_HOST_INFO,
+      "{\"host_param\":\"param_value\"}", "{\"stage_param\":\"param_value\"}");
     stages.add(s);
     when(db.getStagesInProgress()).thenReturn(stages);
 
@@ -207,7 +208,8 @@ public class TestActionScheduler {
     when(host.getHostName()).thenReturn(hostname);
 
     List<Stage> stages = new ArrayList<Stage>();
-    final Stage s = StageUtils.getATestStage(1, 977, hostname, CLUSTER_HOST_INFO);
+    final Stage s = StageUtils.getATestStage(1, 977, hostname, CLUSTER_HOST_INFO,
+      "{\"host_param\":\"param_value\"}", "{\"stage_param\":\"param_value\"}");
     stages.add(s);
 
     ActionDBAccessor db = mock(ActionDBAccessor.class);
@@ -271,7 +273,8 @@ public class TestActionScheduler {
     when(host.getHostName()).thenReturn(hostname);
 
     List<Stage> stages = new ArrayList<Stage>();
-    final Stage s = StageUtils.getATestStage(1, 977, hostname, CLUSTER_HOST_INFO);
+    final Stage s = StageUtils.getATestStage(1, 977, hostname, CLUSTER_HOST_INFO,
+      "{\"host_param\":\"param_value\"}", "{\"stage_param\":\"param_value\"}");
     stages.add(s);
 
     ActionDBAccessor db = mock(ActionDBAccessor.class);
@@ -349,7 +352,7 @@ public class TestActionScheduler {
 
     final List<Stage> stages = new ArrayList<Stage>();
     Stage stage = new Stage(1, "/tmp", "cluster1", 1L, "stageWith2Tasks",
-      CLUSTER_HOST_INFO);
+      CLUSTER_HOST_INFO, "", "");
     addInstallTaskToStage(stage, hostname1, "cluster1", Role.DATANODE,
       RoleCommand.INSTALL, Service.Type.HDFS, 1);
     addInstallTaskToStage(stage, hostname2, "cluster1", Role.NAMENODE,
@@ -575,7 +578,8 @@ public class TestActionScheduler {
 
   private static Stage getStageWithServerAction(long requestId, long stageId, String hostName,
                                                 Map<String, String> payload, String requestContext) {
-    Stage stage = new Stage(requestId, "/tmp", "cluster1", 1L, requestContext, CLUSTER_HOST_INFO);
+    Stage stage = new Stage(requestId, "/tmp", "cluster1", 1L, requestContext, CLUSTER_HOST_INFO,
+      "", "");
     stage.setStageId(stageId);
     long now = System.currentTimeMillis();
     stage.addServerActionCommand(ServerAction.Command.FINALIZE_UPGRADE, Role.AMBARI_SERVER_ACTION,
@@ -989,7 +993,7 @@ public class TestActionScheduler {
 
     long now = System.currentTimeMillis();
     Stage stage = new Stage(1, "/tmp", "cluster1", 1L,
-        "testRequestFailureBasedOnSuccessFactor", CLUSTER_HOST_INFO);
+        "testRequestFailureBasedOnSuccessFactor", CLUSTER_HOST_INFO, "", "");
     stage.setStageId(1);
 
     addHostRoleExecutionCommand(now, stage, Role.SQOOP, Service.Type.SQOOP,
@@ -1174,7 +1178,8 @@ public class TestActionScheduler {
     final List<Stage> stages = new ArrayList<Stage>();
 
     long now = System.currentTimeMillis();
-    Stage stage = new Stage(1, "/tmp", "cluster1", 1L, "testRequestFailureBasedOnSuccessFactor", CLUSTER_HOST_INFO);
+    Stage stage = new Stage(1, "/tmp", "cluster1", 1L, "testRequestFailureBasedOnSuccessFactor",
+      CLUSTER_HOST_INFO, "", "");
     stage.setStageId(1);
     stage.addHostRoleExecutionCommand("host1", Role.DATANODE, RoleCommand.UPGRADE,
         new ServiceComponentHostUpgradeEvent(Role.DATANODE.toString(), "host1", now, "HDP-0.2"),
@@ -1310,7 +1315,8 @@ public class TestActionScheduler {
   private Stage getStageWithSingleTask(String hostname, String clusterName, Role role,
                                        RoleCommand roleCommand, Service.Type service, int taskId,
                                        int stageId, int requestId) {
-    Stage stage = new Stage(requestId, "/tmp", clusterName, 1L, "getStageWithSingleTask", CLUSTER_HOST_INFO);
+    Stage stage = new Stage(requestId, "/tmp", clusterName, 1L, "getStageWithSingleTask",
+      CLUSTER_HOST_INFO, "{\"host_param\":\"param_value\"}", "{\"stage_param\":\"param_value\"}");
     stage.setStageId(stageId);
     stage.addHostRoleExecutionCommand(hostname, role, roleCommand,
         new ServiceComponentHostUpgradeEvent(role.toString(), hostname, System.currentTimeMillis(), "HDP-0.2"),
@@ -1354,7 +1360,8 @@ public class TestActionScheduler {
 
   @Test
   public void testSuccessFactors() {
-    Stage s = StageUtils.getATestStage(1, 1, CLUSTER_HOST_INFO);
+    Stage s = StageUtils.getATestStage(1, 1, CLUSTER_HOST_INFO,
+      "{\"host_param\":\"param_value\"}", "{\"stage_param\":\"param_value\"}");
     assertEquals(new Float(0.5), new Float(s.getSuccessFactor(Role.DATANODE)));
     assertEquals(new Float(0.5), new Float(s.getSuccessFactor(Role.TASKTRACKER)));
     assertEquals(new Float(0.5), new Float(s.getSuccessFactor(Role.GANGLIA_MONITOR)));
@@ -1427,8 +1434,10 @@ public class TestActionScheduler {
 
 
     ActionDBAccessor db = mock(ActionDBAccessorImpl.class);
-    Stage s1 = StageUtils.getATestStage(requestId1, stageId, hostname, CLUSTER_HOST_INFO);
-    Stage s2 = StageUtils.getATestStage(requestId2, stageId, hostname, CLUSTER_HOST_INFO_UPDATED);
+    Stage s1 = StageUtils.getATestStage(requestId1, stageId, hostname, CLUSTER_HOST_INFO,
+      "{\"host_param\":\"param_value\"}", "{\"stage_param\":\"param_value\"}");
+    Stage s2 = StageUtils.getATestStage(requestId2, stageId, hostname, CLUSTER_HOST_INFO_UPDATED,
+      "{\"host_param\":\"param_value\"}", "{\"stage_param\":\"param_value\"}");
     when(db.getStagesInProgress()).thenReturn(Collections.singletonList(s1));
 
     //Keep large number of attempts so that the task is not expired finally
@@ -1499,7 +1508,7 @@ public class TestActionScheduler {
 
     final List<Stage> stages = new ArrayList<Stage>();
     Stage stage1 = new Stage(1, "/tmp", "cluster1", 1L, "stageWith2Tasks",
-            CLUSTER_HOST_INFO);
+            CLUSTER_HOST_INFO, "", "");
     addInstallTaskToStage(stage1, hostname1, "cluster1", Role.HBASE_MASTER,
             RoleCommand.INSTALL, Service.Type.HBASE, 1);
     addInstallTaskToStage(stage1, hostname1, "cluster1", Role.HBASE_REGIONSERVER,

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestStage.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestStage.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestStage.java
index ba6286f..bde19a1 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestStage.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestStage.java
@@ -34,7 +34,7 @@ public class TestStage {
 
   @Test
   public void testTaskTimeout() {
-    Stage s = StageUtils.getATestStage(1, 1, "h1", CLUSTER_HOST_INFO);
+    Stage s = StageUtils.getATestStage(1, 1, "h1", CLUSTER_HOST_INFO, "{\"host_param\":\"param_value\"}", "{\"stage_param\":\"param_value\"}");
     s.addHostRoleExecutionCommand("h1", Role.DATANODE, RoleCommand.INSTALL,
         null, "c1", "HDFS");
     s.addHostRoleExecutionCommand("h1", Role.HBASE_MASTER, RoleCommand.INSTALL,
@@ -50,7 +50,7 @@ public class TestStage {
   @Test
   public void testGetRequestContext() {
 
-    Stage stage = new Stage(1, "/logDir", "c1", 1L, "My Context", CLUSTER_HOST_INFO);
+    Stage stage = new Stage(1, "/logDir", "c1", 1L, "My Context", CLUSTER_HOST_INFO, "", "");
     assertEquals("My Context", stage.getRequestContext());
     assertEquals(CLUSTER_HOST_INFO, stage.getClusterHostInfo());
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
index 5c4a4f1..8c975d6 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
@@ -32,11 +32,16 @@ import static org.apache.ambari.server.agent.DummyHeartbeatConstants.HDFS;
 import static org.apache.ambari.server.agent.DummyHeartbeatConstants.HDFS_CLIENT;
 import static org.apache.ambari.server.agent.DummyHeartbeatConstants.NAMENODE;
 import static org.apache.ambari.server.agent.DummyHeartbeatConstants.SECONDARY_NAMENODE;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.createMockBuilder;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.reset;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
-import static org.mockito.Matchers.anyList;
 import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -91,7 +96,6 @@ import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStartEvent
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostUpgradeEvent;
 import org.apache.ambari.server.utils.StageUtils;
 import org.codehaus.jackson.JsonGenerationException;
-import static org.easymock.EasyMock.*;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -699,7 +703,8 @@ public class TestHeartbeatHandler {
   }
 
   private void populateActionDB(ActionDBAccessor db, String DummyHostname1) throws AmbariException {
-    Stage s = new Stage(requestId, "/a/b", DummyCluster, 1L, "heartbeat handler test", "clusterHostInfo");
+    Stage s = new Stage(requestId, "/a/b", DummyCluster, 1L, "heartbeat handler test",
+      "clusterHostInfo", "commandParamsStage", "hostParamsStage");
     s.setStageId(stageId);
     String filename = null;
     s.addHostRoleExecutionCommand(DummyHostname1, Role.HBASE_MASTER,
@@ -1084,7 +1089,7 @@ public class TestHeartbeatHandler {
     serviceComponentHost1.setState(State.INSTALLING);
 
     Stage s = new Stage(1, "/a/b", "cluster1", 1L, "action manager test",
-      "clusterHostInfo");
+      "clusterHostInfo", "commandParamsStage", "hostParamsStage");
     s.setStageId(1);
     s.addHostRoleExecutionCommand(DummyHostname1, Role.DATANODE, RoleCommand.INSTALL,
       new ServiceComponentHostInstallEvent(Role.DATANODE.toString(),
@@ -1657,7 +1662,7 @@ public class TestHeartbeatHandler {
     serviceComponentHost2.setStackVersion(stack122);
 
     Stage s = new Stage(requestId, "/a/b", "cluster1", 1L, "action manager test",
-      "clusterHostInfo");
+      "clusterHostInfo", "commandParamsStage", "hostParamsStage");
     s.setStageId(stageId);
     s.addHostRoleExecutionCommand(DummyHostname1, Role.DATANODE, RoleCommand.UPGRADE,
       new ServiceComponentHostUpgradeEvent(Role.DATANODE.toString(),

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 3aafb9a..ca76dc5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -3130,12 +3130,15 @@ public class AmbariManagementControllerTest {
     Assert.assertNull(stage1.getExecutionCommandWrapper(host2, "DATANODE"));
     Assert.assertNotNull(stage3.getExecutionCommandWrapper(host1, "HBASE_SERVICE_CHECK"));
     Assert.assertNotNull(stage2.getExecutionCommandWrapper(host2, "HDFS_SERVICE_CHECK"));
+ 
+    Type type = new TypeToken<Map<String, String>>() {}.getType();
+
 
     for (Stage s : stages) {
       for (List<ExecutionCommandWrapper> list : s.getExecutionCommands().values()) {
         for (ExecutionCommandWrapper ecw : list) {
           if (ecw.getExecutionCommand().getRole().contains("SERVICE_CHECK")) {
-            Map<String, String> hostParams = ecw.getExecutionCommand().getHostLevelParams();
+            Map<String, String> hostParams = StageUtils.getGson().fromJson(s.getHostParamsStage(), type);
             Assert.assertNotNull(hostParams);
             Assert.assertTrue(hostParams.size() > 0);
             Assert.assertTrue(hostParams.containsKey("stack_version"));
@@ -3936,12 +3939,14 @@ public class AmbariManagementControllerTest {
     Assert.assertEquals("a1", task.getRole().name());
     Assert.assertEquals("h1", task.getHostName());
     ExecutionCommand cmd = task.getExecutionCommandWrapper().getExecutionCommand();
-    Map<String, String> commandParameters = cmd.getCommandParams();
+    Type type = new TypeToken<Map<String, String>>(){}.getType();
+    Map<String, String> hostParametersStage = StageUtils.getGson().fromJson(stage.getHostParamsStage(), type);
+    Map<String, String> commandParametersStage = StageUtils.getGson().fromJson(stage.getCommandParamsStage(), type);
 
-    Assert.assertTrue(commandParameters.containsKey("test"));
+    Assert.assertTrue(commandParametersStage.containsKey("test"));
     Assert.assertEquals("HDFS", cmd.getServiceName());
     Assert.assertEquals("DATANODE", cmd.getComponentName());
-    Assert.assertNotNull(commandParameters.get("jdk_location"));
+    Assert.assertNotNull(hostParametersStage.get("jdk_location"));
 
     resourceFilters.clear();
     resourceFilter = new RequestResourceFilter("", "", null);
@@ -3965,9 +3970,9 @@ public class AmbariManagementControllerTest {
     Assert.assertEquals(expectedHosts, actualHosts);
 
     cmd = task.getExecutionCommandWrapper().getExecutionCommand();
-    commandParameters = cmd.getCommandParams();
+    commandParametersStage = StageUtils.getGson().fromJson(stage.getCommandParamsStage(), type);
 
-    Assert.assertTrue(commandParameters.containsKey("test"));
+    Assert.assertTrue(commandParametersStage.containsKey("test"));
     Assert.assertEquals("HDFS", cmd.getServiceName());
     Assert.assertEquals("DATANODE", cmd.getComponentName());
 
@@ -4063,24 +4068,24 @@ public class AmbariManagementControllerTest {
     Assert.assertNotNull(stages);
 
     HostRoleCommand hrc = null;
+    Type type = new TypeToken<Map<String, String>>(){}.getType();
     for (Stage stage : stages) {
       for (HostRoleCommand cmd : stage.getOrderedHostRoleCommands()) {
         if (cmd.getRole().equals(Role.HDFS_CLIENT)) {
           hrc = cmd;
         }
+        Map<String, String> hostParamStage = StageUtils.getGson().fromJson(stage.getHostParamsStage(), type);
+        Assert.assertTrue(hostParamStage.containsKey(ExecutionCommand.KeyNames.DB_DRIVER_FILENAME));
+        Assert.assertTrue(hostParamStage.containsKey(ExecutionCommand.KeyNames.MYSQL_JDBC_URL));
+        Assert.assertTrue(hostParamStage.containsKey(ExecutionCommand.KeyNames.ORACLE_JDBC_URL));
       }
     }
     Assert.assertNotNull(hrc);
     Assert.assertEquals("RESTART HDFS/HDFS_CLIENT", hrc.getCommandDetail());
     Map<String, String> roleParams = hrc.getExecutionCommandWrapper()
       .getExecutionCommand().getRoleParams();
-    Map<String, String> hostParams = hrc.getExecutionCommandWrapper()
-        .getExecutionCommand().getHostLevelParams();
 
     Assert.assertNotNull(roleParams);
-    Assert.assertTrue(hostParams.containsKey(ExecutionCommand.KeyNames.DB_DRIVER_FILENAME));
-    Assert.assertTrue(hostParams.containsKey(ExecutionCommand.KeyNames.MYSQL_JDBC_URL));
-    Assert.assertTrue(hostParams.containsKey(ExecutionCommand.KeyNames.ORACLE_JDBC_URL));
     Assert.assertEquals("CLIENT", roleParams.get(ExecutionCommand.KeyNames.COMPONENT_CATEGORY));
     Assert.assertTrue(hrc.getExecutionCommandWrapper().getExecutionCommand().getCommandParams().containsKey("hdfs_client"));
     Assert.assertEquals("abc", hrc.getExecutionCommandWrapper().getExecutionCommand().getCommandParams().get("hdfs_client"));
@@ -6321,7 +6326,11 @@ public class AmbariManagementControllerTest {
     Assert.assertNotNull(nnCommand);
     ExecutionCommand cmd = nnCommand.getExecutionCommandWrapper().getExecutionCommand();
     Assert.assertEquals("a1", cmd.getRole());
-    Assert.assertTrue(cmd.getCommandParams().containsKey("test"));
+    Type type = new TypeToken<Map<String, String>>(){}.getType();
+    for (Stage stage : actionDB.getAllStages(response.getRequestId())){
+      Map<String, String> commandParamsStage = StageUtils.getGson().fromJson(stage.getCommandParamsStage(), type);
+      Assert.assertTrue(commandParamsStage.containsKey("test"));
+    }
   }
 
   @Test
@@ -6400,8 +6409,6 @@ public class AmbariManagementControllerTest {
     int expectedRestartCount = 0;
     for (HostRoleCommand hrc : storedTasks) {
       Assert.assertEquals("RESTART", hrc.getCustomCommandName());
-      Assert.assertNotNull(hrc.getExecutionCommandWrapper()
-          .getExecutionCommand().getCommandParams().get("jdk_location"));
 
       if (hrc.getHostName().equals("h1") && hrc.getRole().equals(Role.DATANODE)) {
         expectedRestartCount++;
@@ -6506,6 +6513,13 @@ public class AmbariManagementControllerTest {
         }
       }
     }
+ 
+    Type type = new TypeToken<Map<String, String>>(){}.getType();
+    for (Stage stage : actionDB.getAllStages(requestId)){
+      Map<String, String> hostParamsStage = StageUtils.getGson().fromJson(stage.getHostParamsStage(), type);
+      Assert.assertNotNull(hostParamsStage.get("jdk_location"));
+    }
+
     Assert.assertEquals(true, serviceCheckFound);
   }
 
@@ -6630,15 +6644,21 @@ public class AmbariManagementControllerTest {
         .getDesiredState());
 
     List<Stage> stages = actionDB.getAllStages(trackAction.getRequestId());
-    Map<String, String> params = stages.get(0).getOrderedHostRoleCommands().get
+    Type type = new TypeToken<Map<String, String>>(){}.getType();
+
+    for (Stage stage : stages){
+      Map<String, String> params = StageUtils.getGson().fromJson(stage.getHostParamsStage(), type);
+      Assert.assertEquals("0.1", params.get("stack_version"));
+      Assert.assertNotNull(params.get("jdk_location"));
+      Assert.assertNotNull(params.get("db_name"));
+      Assert.assertNotNull(params.get("mysql_jdbc_url"));
+      Assert.assertNotNull(params.get("oracle_jdbc_url"));
+    }
+
+    Map<String, String> paramsCmd = stages.get(0).getOrderedHostRoleCommands().get
       (0).getExecutionCommandWrapper().getExecutionCommand()
       .getHostLevelParams();
-    Assert.assertEquals("0.1", params.get("stack_version"));
-    Assert.assertNotNull(params.get("jdk_location"));
-    Assert.assertNotNull(params.get("repo_info"));
-    Assert.assertNotNull(params.get("db_name"));
-    Assert.assertNotNull(params.get("mysql_jdbc_url"));
-    Assert.assertNotNull(params.get("oracle_jdbc_url"));
+    Assert.assertNotNull(paramsCmd.get("repo_info"));
   }
 
   @Test
@@ -7874,7 +7894,8 @@ public class AmbariManagementControllerTest {
 
 
     List<Stage> stages = new ArrayList<Stage>();
-    stages.add(new Stage(requestId1, "/a1", clusterName, 1L, context, CLUSTER_HOST_INFO));
+    stages.add(new Stage(requestId1, "/a1", clusterName, 1L, context,
+      CLUSTER_HOST_INFO, "", ""));
     stages.get(0).setStageId(1);
     stages.get(0).addHostRoleExecutionCommand(hostName1, Role.HBASE_MASTER,
             RoleCommand.START,
@@ -7882,14 +7903,16 @@ public class AmbariManagementControllerTest {
                     hostName1, System.currentTimeMillis()),
             clusterName, "HBASE");
 
-    stages.add(new Stage(requestId1, "/a2", clusterName, 1L, context, CLUSTER_HOST_INFO));
+    stages.add(new Stage(requestId1, "/a2", clusterName, 1L, context,
+      CLUSTER_HOST_INFO, "", ""));
     stages.get(1).setStageId(2);
     stages.get(1).addHostRoleExecutionCommand(hostName1, Role.HBASE_CLIENT,
             RoleCommand.START,
             new ServiceComponentHostStartEvent(Role.HBASE_CLIENT.toString(),
                     hostName1, System.currentTimeMillis()), clusterName, "HBASE");
 
-    stages.add(new Stage(requestId1, "/a3", clusterName, 1L, context, CLUSTER_HOST_INFO));
+    stages.add(new Stage(requestId1, "/a3", clusterName, 1L, context,
+      CLUSTER_HOST_INFO, "", ""));
     stages.get(2).setStageId(3);
     stages.get(2).addHostRoleExecutionCommand(hostName1, Role.HBASE_CLIENT,
             RoleCommand.START,
@@ -7900,14 +7923,16 @@ public class AmbariManagementControllerTest {
     actionDB.persistActions(request);
 
     stages.clear();
-    stages.add(new Stage(requestId2, "/a4", clusterName, 1L, context, CLUSTER_HOST_INFO));
+    stages.add(new Stage(requestId2, "/a4", clusterName, 1L, context,
+      CLUSTER_HOST_INFO, "", ""));
     stages.get(0).setStageId(4);
     stages.get(0).addHostRoleExecutionCommand(hostName1, Role.HBASE_CLIENT,
             RoleCommand.START,
             new ServiceComponentHostStartEvent(Role.HBASE_CLIENT.toString(),
                     hostName1, System.currentTimeMillis()), clusterName, "HBASE");
 
-    stages.add(new Stage(requestId2, "/a5", clusterName, 1L, context, CLUSTER_HOST_INFO));
+    stages.add(new Stage(requestId2, "/a5", clusterName, 1L, context,
+      CLUSTER_HOST_INFO, "", ""));
     stages.get(1).setStageId(5);
     stages.get(1).addHostRoleExecutionCommand(hostName1, Role.HBASE_CLIENT,
             RoleCommand.START,
@@ -10142,7 +10167,9 @@ public class AmbariManagementControllerTest {
     Assert.assertEquals(hostname1, task.getHostName());
 
     ExecutionCommand cmd = task.getExecutionCommandWrapper().getExecutionCommand();
-    Assert.assertTrue(cmd.getCommandParams().containsKey("some_custom_param"));
+    Type type = new TypeToken<Map<String, String>>(){}.getType();
+    Map<String, String> commandParamsStage = StageUtils.getGson().fromJson(stage.getCommandParamsStage(), type);
+    Assert.assertTrue(commandParamsStage.containsKey("some_custom_param"));
     Assert.assertEquals(null, cmd.getServiceName());
     Assert.assertEquals(null, cmd.getComponentName());
 
@@ -10181,7 +10208,8 @@ public class AmbariManagementControllerTest {
     Assert.assertEquals(hostname1, task.getHostName());
 
     cmd = task.getExecutionCommandWrapper().getExecutionCommand();
-    Assert.assertTrue(cmd.getCommandParams().containsKey("some_custom_param"));
+    commandParamsStage = StageUtils.getGson().fromJson(stage.getCommandParamsStage(), type);
+    Assert.assertTrue(commandParamsStage.containsKey("some_custom_param"));
     Assert.assertEquals(null, cmd.getServiceName());
     Assert.assertEquals(null, cmd.getComponentName());
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9faeaf5a/ambari-server/src/test/java/org/apache/ambari/server/stageplanner/TestStagePlanner.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stageplanner/TestStagePlanner.java b/ambari-server/src/test/java/org/apache/ambari/server/stageplanner/TestStagePlanner.java
index cf1e412..f008980 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/stageplanner/TestStagePlanner.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stageplanner/TestStagePlanner.java
@@ -67,7 +67,7 @@ public class TestStagePlanner {
 
     RoleGraph rg = new RoleGraph(rco);
     String hostname = "dummy";
-    Stage stage = StageUtils.getATestStage(1, 1, hostname);
+    Stage stage = StageUtils.getATestStage(1, 1, hostname, "", "");
     rg.build(stage);
     List<Stage> outStages = rg.getStages();
     for (Stage s: outStages) {
@@ -86,7 +86,7 @@ public class TestStagePlanner {
     rco.initialize(cluster);
     RoleGraph rg = new RoleGraph(rco);
     long now = System.currentTimeMillis();
-    Stage stage = StageUtils.getATestStage(1, 1, "host1");
+    Stage stage = StageUtils.getATestStage(1, 1, "host1", "", "");
     stage.addHostRoleExecutionCommand("host2", Role.HBASE_MASTER,
         RoleCommand.START, new ServiceComponentHostStartEvent("HBASE_MASTER",
             "host2", now), "cluster1", "HBASE");
@@ -112,7 +112,7 @@ public class TestStagePlanner {
     rco.initialize(cluster);
     RoleGraph rg = new RoleGraph(rco);
     long now = System.currentTimeMillis();
-    Stage stage = StageUtils.getATestStage(1, 1, "host1");
+    Stage stage = StageUtils.getATestStage(1, 1, "host1", "", "");
     stage.addHostRoleExecutionCommand("host11", Role.SECONDARY_NAMENODE,
         RoleCommand.START, new ServiceComponentHostStartEvent("SECONDARY_NAMENODE",
             "host11", now), "cluster1", "HDFS");


[33/35] git commit: AMBARI-7033 Unable to open Job details(Max Shepel via ababiichuk)

Posted by jo...@apache.org.
AMBARI-7033 Unable to open Job details(Max Shepel via ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1f48b24f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1f48b24f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1f48b24f

Branch: refs/heads/branch-alerts-dev
Commit: 1f48b24feae75f35a0ea6f10dcf61332934b8534
Parents: 8d8583b
Author: aBabiichuk <ab...@cybervisiontech.com>
Authored: Wed Aug 27 14:59:38 2014 +0300
Committer: aBabiichuk <ab...@cybervisiontech.com>
Committed: Wed Aug 27 14:59:38 2014 +0300

----------------------------------------------------------------------
 ambari-web/app/mappers.js | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1f48b24f/ambari-web/app/mappers.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers.js b/ambari-web/app/mappers.js
index 80c7255..aa7aea0 100644
--- a/ambari-web/app/mappers.js
+++ b/ambari-web/app/mappers.js
@@ -20,6 +20,7 @@
 require('mappers/server_data_mapper');
 require('mappers/stack_service_mapper');
 require('mappers/hosts_mapper');
+require('mappers/jobs_mapper');
 require('mappers/cluster_mapper');
 require('mappers/runs_mapper');
 require('mappers/racks_mapper');


[25/35] git commit: AMBARI-7022. FE: Enable Dynamic configs and layout support in UI (srimanth)

Posted by jo...@apache.org.
AMBARI-7022. FE: Enable Dynamic configs and layout support in UI (srimanth)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f8193e94
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f8193e94
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f8193e94

Branch: refs/heads/branch-alerts-dev
Commit: f8193e941f22bac6bbc64a141613d8f8bf90633b
Parents: 02ee3d4
Author: Srimanth Gunturi <sg...@hortonworks.com>
Authored: Tue Aug 26 13:11:53 2014 -0700
Committer: Srimanth Gunturi <sg...@hortonworks.com>
Committed: Tue Aug 26 13:11:53 2014 -0700

----------------------------------------------------------------------
 ambari-web/app/config.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f8193e94/ambari-web/app/config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/config.js b/ambari-web/app/config.js
index be1fd97..b734111 100644
--- a/ambari-web/app/config.js
+++ b/ambari-web/app/config.js
@@ -79,7 +79,7 @@ App.supports = {
   flume: true,
   databaseConnection: true,
   configHistory: false,
-  serverRecommendValidate: false,
+  serverRecommendValidate: true,
   downloadClientConfigs: true,
   abortRequests: true
 };


[35/35] git commit: Merge branch 'trunk' into branch-alerts-dev

Posted by jo...@apache.org.
Merge branch 'trunk' into branch-alerts-dev

Conflicts:
	ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
	ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
	ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
	ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java
	ambari-server/src/test/java/org/apache/ambari/server/utils/TestStageUtils.java


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ae8f1e77
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ae8f1e77
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ae8f1e77

Branch: refs/heads/branch-alerts-dev
Commit: ae8f1e77eafe32c2ad8bded7da24a2cef07b605f
Parents: 4a4644b a826334
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Aug 27 10:04:14 2014 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Wed Aug 27 10:04:14 2014 -0400

----------------------------------------------------------------------
 ambari-agent/conf/unix/install-helper.sh        |   7 -
 ambari-agent/pom.xml                            |  11 +-
 ambari-server/conf/unix/install-helper.sh       |   3 -
 .../server/actionmanager/ActionScheduler.java   |  33 ++
 .../ambari/server/actionmanager/Stage.java      |  32 +-
 .../server/actionmanager/StageFactory.java      |   4 +-
 .../ambari/server/agent/ExecutionCommand.java   |   2 +-
 .../resources/RequestResourceDefinition.java    |  12 +-
 .../ServiceConfigVersionResourceDefinition.java |  10 +-
 .../server/api/services/AmbariMetaInfo.java     |  18 +
 .../stackadvisor/StackAdvisorHelper.java        |  14 +-
 .../stackadvisor/StackAdvisorHelper.java.orig   | 116 ----
 .../stackadvisor/StackAdvisorResponse.java      |  73 +++
 ...GetComponentLayoutRecommnedationCommand.java |   5 +-
 .../GetComponentLayoutValidationCommand.java    |   5 +-
 .../GetConfigurationRecommnedationCommand.java  |  18 +-
 .../GetConfigurationValidationCommand.java      |   5 +-
 .../commands/StackAdvisorCommand.java           |  30 +-
 .../recommendations/RecommendationResponse.java |  38 +-
 .../validations/ValidationResponse.java         |  15 +-
 .../server/api/util/StackExtensionHelper.java   |   9 +
 .../controller/AmbariActionExecutionHelper.java |   9 +-
 .../AmbariCustomCommandExecutionHelper.java     |  34 +-
 .../AmbariManagementControllerImpl.java         | 184 ++++---
 .../server/controller/ClusterRequest.java       |  28 +-
 .../server/controller/ClusterResponse.java      |   9 +-
 .../server/controller/ConfigGroupRequest.java   |   9 +
 .../ambari/server/controller/HostRequest.java   |  11 +-
 .../controller/ServiceConfigVersionRequest.java |  23 +
 .../ServiceConfigVersionResponse.java           |  13 +-
 .../StackServiceComponentResponse.java          |  33 +-
 .../server/controller/StackServiceResponse.java |  16 +-
 .../internal/AbstractResourceProvider.java      |  92 ++--
 .../internal/BaseBlueprintProcessor.java        |  37 +-
 .../BlueprintConfigurationProcessor.java        |   5 +-
 .../internal/ClusterResourceProvider.java       | 126 +++--
 .../internal/ConfigGroupResourceProvider.java   |   4 +
 .../internal/HostResourceProvider.java          |  54 +-
 .../RecommendationResourceProvider.java         |   4 +-
 .../ServiceConfigVersionResourceProvider.java   |  12 +-
 .../StackServiceComponentResourceProvider.java  |  22 +-
 .../internal/StackServiceResourceProvider.java  |  22 +-
 .../internal/ValidationResourceProvider.java    |   4 +-
 .../internal/ViewInstanceResourceProvider.java  |   9 +-
 .../internal/ViewVersionResourceProvider.java   |   3 +
 .../ambari/server/orm/dao/ServiceConfigDAO.java |  41 +-
 .../server/orm/entities/AlertGroupEntity.java   |  74 ++-
 .../ambari/server/orm/entities/StageEntity.java |  28 +
 .../ambari/server/orm/entities/ViewEntity.java  |  39 +-
 .../ambari/server/stageplanner/RoleGraph.java   |   3 +-
 .../org/apache/ambari/server/state/Cluster.java |  15 +-
 .../ambari/server/state/ComponentInfo.java      |  14 +-
 .../ambari/server/state/ConfigHelper.java       |  15 +-
 .../apache/ambari/server/state/ServiceInfo.java |   9 +
 .../server/state/cluster/ClusterImpl.java       | 119 +++-
 .../server/upgrade/AbstractUpgradeCatalog.java  |   3 +-
 .../server/upgrade/UpgradeCatalog170.java       |  19 +-
 .../apache/ambari/server/utils/StageUtils.java  |  13 +-
 .../apache/ambari/server/view/ViewRegistry.java |  15 +
 .../server/view/configuration/ViewConfig.java   |  14 +
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   5 +-
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |   5 +-
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |   5 +-
 .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql     |   5 +-
 .../main/resources/custom_actions/check_host.py |   7 +-
 .../src/main/resources/properties.json          |   7 +-
 .../src/main/resources/scripts/stack_advisor.py |   5 +-
 .../1.3.2/hooks/before-ANY/scripts/params.py    |   3 +-
 .../hooks/before-INSTALL/scripts/params.py      |   2 +-
 .../HDP/1.3.2/services/GANGLIA/metainfo.xml     |   3 +
 .../HDP/1.3.2/services/HBASE/metainfo.xml       |   4 +
 .../stacks/HDP/1.3.2/services/HDFS/metainfo.xml |   5 +
 .../stacks/HDP/1.3.2/services/HIVE/metainfo.xml |  11 +-
 .../services/HIVE/package/scripts/params.py     |   2 +-
 .../HDP/1.3.2/services/MAPREDUCE/metainfo.xml   |   5 +
 .../HDP/1.3.2/services/NAGIOS/metainfo.xml      |   2 +
 .../HDP/1.3.2/services/OOZIE/metainfo.xml       |   3 +
 .../stacks/HDP/1.3.2/services/PIG/metainfo.xml  |   2 +
 .../HDP/1.3.2/services/SQOOP/metainfo.xml       |   2 +
 .../HDP/1.3.2/services/WEBHCAT/metainfo.xml     |   4 +-
 .../HDP/1.3.2/services/ZOOKEEPER/metainfo.xml   |   3 +
 .../stacks/HDP/1.3.2/services/stack_advisor.py  | 519 ++++++++++++++++++
 .../stacks/HDP/1.3.3/services/stack_advisor.py  |  25 +
 .../stacks/HDP/1.3/services/stack_advisor.py    |  25 +
 .../2.0.6/hooks/before-ANY/scripts/params.py    |   3 +-
 .../hooks/before-INSTALL/scripts/params.py      |   2 +-
 .../HDP/2.0.6/services/FLUME/metainfo.xml       |   2 +
 .../HDP/2.0.6/services/GANGLIA/metainfo.xml     |   3 +
 .../HDP/2.0.6/services/HBASE/metainfo.xml       |   4 +
 .../stacks/HDP/2.0.6/services/HDFS/metainfo.xml |   7 +
 .../stacks/HDP/2.0.6/services/HIVE/metainfo.xml |  11 +-
 .../services/HIVE/package/scripts/params.py     |   2 +-
 .../HDP/2.0.6/services/NAGIOS/metainfo.xml      |   2 +
 .../HDP/2.0.6/services/OOZIE/metainfo.xml       |   3 +
 .../stacks/HDP/2.0.6/services/PIG/metainfo.xml  |   2 +
 .../HDP/2.0.6/services/SQOOP/metainfo.xml       |   2 +
 .../HDP/2.0.6/services/WEBHCAT/metainfo.xml     |   4 +-
 .../stacks/HDP/2.0.6/services/YARN/metainfo.xml |   7 +
 .../HDP/2.0.6/services/ZOOKEEPER/metainfo.xml   |   2 +
 .../FALCON/configuration/falcon-env.xml         |  63 +++
 .../2.1.GlusterFS/services/FALCON/metainfo.xml  |   2 +-
 .../services/FALCON/package/scripts/params.py   |  32 +-
 .../FALCON/package/scripts/status_params.py     |   2 +-
 .../GLUSTERFS/configuration/core-site.xml       |  26 -
 .../GLUSTERFS/configuration/hadoop-env.xml      | 207 +++++++
 .../services/GLUSTERFS/metainfo.xml             |   7 +-
 .../2.1.GlusterFS/services/OOZIE/metainfo.xml   |   2 +-
 .../services/STORM/configuration/storm-env.xml  |  39 ++
 .../2.1.GlusterFS/services/STORM/metainfo.xml   |   2 +-
 .../services/STORM/package/scripts/params.py    |  12 +-
 .../STORM/package/scripts/status_params.py      |   2 +-
 .../services/TEZ/configuration/tez-env.xml      |  29 +
 .../HDP/2.1.GlusterFS/services/TEZ/metainfo.xml |   2 +-
 .../services/TEZ/package/scripts/params.py      |   4 +-
 .../services/YARN/configuration/global.xml      |  64 ---
 .../services/YARN/configuration/yarn-env.xml    | 181 +++++++
 .../2.1.GlusterFS/services/YARN/metainfo.xml    |   3 +-
 .../services/YARN/package/scripts/params.py     |  41 +-
 .../YARN/package/scripts/status_params.py       |   8 +-
 .../stacks/HDP/2.1/services/FALCON/metainfo.xml |   3 +
 .../stacks/HDP/2.1/services/STORM/metainfo.xml  |   6 +
 .../stacks/HDP/2.1/services/TEZ/metainfo.xml    |   2 +
 .../stacks/HDP/2.1/services/YARN/metainfo.xml   |   2 +
 .../stacks/HDP/2.1/services/stack_advisor.py    |   7 -
 .../ExecutionCommandWrapperTest.java            |   2 +-
 .../actionmanager/TestActionDBAccessorImpl.java |  20 +-
 .../server/actionmanager/TestActionManager.java |  31 +-
 .../actionmanager/TestActionScheduler.java      |  33 +-
 .../ambari/server/actionmanager/TestStage.java  |   4 +-
 .../server/agent/TestHeartbeatHandler.java      |  15 +-
 .../server/agent/TestHeartbeatMonitor.java      |   4 +-
 .../server/api/services/AmbariMetaInfoTest.java |   9 +
 .../stackadvisor/StackAdvisorHelperTest.java    |  22 +-
 ...tConfigurationRecommnedationCommandTest.java | 103 ++++
 .../commands/StackAdvisorCommandTest.java       |  86 ++-
 .../api/util/StackExtensionHelperTest.java      |   2 +
 .../AmbariManagementControllerTest.java         | 164 +++---
 ...hYarnCapacitySchedulerReleaseConfigTest.java |   4 +-
 .../internal/ClusterResourceProviderTest.java   | 537 ++++++++++++++++++-
 .../internal/JMXHostProviderTest.java           |  10 +-
 .../RecommendationResourceProviderTest.java     |  31 ++
 .../ValidationResourceProviderTest.java         |  70 +++
 .../ViewInstanceResourceProviderTest.java       |  89 ++-
 .../server/stageplanner/TestStagePlanner.java   |   6 +-
 .../ambari/server/state/ConfigHelperTest.java   |   6 +-
 .../server/state/cluster/ClusterTest.java       |  66 ++-
 .../server/state/cluster/ClustersTest.java      |   3 +-
 .../svccomphost/ServiceComponentHostTest.java   |   3 +-
 .../server/upgrade/UpgradeCatalog170Test.java   |  41 +-
 .../server/upgrade/UpgradeCatalogTest.java      |   2 +-
 .../ambari/server/utils/TestStageUtils.java     |   6 +-
 .../view/configuration/ViewConfigTest.java      |   7 +
 ambari-server/src/test/python/TestCheckHost.py  |  29 +-
 .../stacks/1.3.2/HIVE/test_hive_metastore.py    |   4 +-
 .../stacks/1.3.2/HIVE/test_hive_server.py       |   4 +-
 .../1.3.2/hooks/before-ANY/test_before_any.py   |   6 +-
 .../hooks/before-INSTALL/test_before_install.py |   4 +-
 .../stacks/2.0.6/HIVE/test_hive_metastore.py    |   4 +-
 .../stacks/2.0.6/HIVE/test_hive_server.py       |   4 +-
 .../2.0.6/hooks/before-ANY/test_before_any.py   |   6 +-
 .../hooks/before-INSTALL/test_before_install.py |   4 +-
 .../stacks/2.1/HIVE/test_hive_metastore.py      |   4 +-
 .../HDP/2.0.6/services/NAGIOS/metainfo.xml      |   2 +
 .../HDP/2.0.6/services/WEBHCAT/metainfo.xml     |   1 +
 .../stacks/HDP/2.0.6/services/YARN/metainfo.xml |   4 +
 .../HDP/2.0.7/services/HBASE/metainfo.xml       |   4 +
 .../stacks/HDP/2.0.7/services/HDFS/metainfo.xml |   7 +
 .../stacks/HDP/2.0.7/services/HIVE/metainfo.xml |   5 +
 .../HDP/2.0.7/services/ZOOKEEPER/metainfo.xml   |   3 +
 .../HDP/2.0.8/services/SQOOP/metainfo.xml       |   1 +
 .../HDP/2.1.1/services/STORM/metainfo.xml       |   4 +
 .../org/apache/ambari/view/ViewDefinition.java  |   7 +
 .../data/configurations/service_version.json    |   7 +-
 .../data/configurations/service_versions.json   |  77 +--
 ambari-web/app/assets/test/tests.js             |   2 +
 ambari-web/app/config.js                        |   2 +-
 .../main/dashboard/config_history_controller.js |   4 +-
 .../controllers/main/service/info/configs.js    |   8 +-
 .../app/controllers/wizard/step3_controller.js  |   1 -
 ambari-web/app/data/HDP2/secure_properties.js   |   1 +
 ambari-web/app/data/HDP2/site_properties.js     | 178 +++---
 ambari-web/app/data/site_properties.js          | 160 +++---
 ambari-web/app/mappers.js                       |   1 +
 .../mappers/service_config_version_mapper.js    |   3 +-
 ambari-web/app/mappers/stack_service_mapper.js  |   2 +
 ambari-web/app/messages.js                      |   5 +-
 ambari-web/app/mixins.js                        |   1 -
 ambari-web/app/mixins/common/serverValidator.js |  19 +-
 ambari-web/app/mixins/models/service_mixin.js   |  30 --
 ambari-web/app/models/host_component.js         |  28 +-
 ambari-web/app/models/service.js                |   7 +-
 ambari-web/app/models/service_config_version.js |  12 +-
 ambari-web/app/models/stack_service.js          |  13 +-
 .../app/models/stack_service_component.js       |  10 +-
 ambari-web/app/styles/application.less          |  58 +-
 .../common/configs/config_history_flow.hbs      |  42 +-
 .../templates/main/dashboard/config_history.hbs |  11 +-
 ambari-web/app/utils/ajax/ajax.js               |   2 +-
 ambari-web/app/utils/config.js                  |   2 +-
 ambari-web/app/utils/helper.js                  |  15 +-
 .../views/common/configs/config_history_flow.js |  67 ++-
 .../common/configs/overriddenProperty_view.js   |   1 +
 .../app/views/common/configs/services_config.js |   3 +-
 .../views/main/dashboard/config_history_view.js |  12 +-
 ambari-web/test/app_test.js                     |  54 +-
 .../dashboard/config_history_controller_test.js |  25 +-
 ambari-web/test/models/host_component_test.js   | 212 +++++++-
 ambari-web/test/models/host_test.js             | 357 +++++++++++-
 ambari-web/test/models/service_test.js          |  59 +-
 ambari-web/test/models/stack_service_test.js    | 343 ++++++++++++
 ambari-web/test/service_components.js           |  61 +++
 ambari-web/test/utils/blueprint_test.js         |   1 -
 ambari-web/test/utils/date_test.js              |   4 +-
 .../common/configs/config_history_flow_test.js  |  26 +-
 .../src/main/resources/ui/app/styles/main.less  |   7 +-
 docs/pom.xml                                    |  18 +-
 216 files changed, 5151 insertions(+), 1390 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ae8f1e77/ambari-agent/pom.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae8f1e77/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
index 0600f27,33c5af4..75ba178
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
@@@ -42,53 -42,23 +42,53 @@@ public class ExecutionCommand extends A
      super(AgentCommandType.EXECUTION_COMMAND);
    }
  
 +  @SerializedName("clusterName")
    private String clusterName;
 +
 +  @SerializedName("taskId")
    private long taskId;
 +
 +  @SerializedName("commandId")
    private String commandId;
 +
 +  @SerializedName("hostname")
    private String hostname;
 +
 +  @SerializedName("role")
    private String role;
 +
 +  @SerializedName("hostLevelParams")
    private Map<String, String> hostLevelParams = new HashMap<String, String>();
 +
 +  @SerializedName("roleParams")
    private Map<String, String> roleParams = null;
 +
 +  @SerializedName("roleCommand")
    private RoleCommand roleCommand;
 -  private Map<String, Set<String>> clusterHostInfo = 
 +
 +  @SerializedName("clusterHostInfo")
 +  private Map<String, Set<String>> clusterHostInfo =
        new HashMap<String, Set<String>>();
 +
 +  @SerializedName("configurations")
    private Map<String, Map<String, String>> configurations;
 +
    @SerializedName("configuration_attributes")
    private Map<String, Map<String, Map<String, String>>> configurationAttributes;
 +
 +  @SerializedName("configurationTags")
    private Map<String, Map<String, String>> configurationTags;
 +
 +  @SerializedName("forceRefreshConfigTags")
    private Set<String> forceRefreshConfigTags = new HashSet<String>();
 +
 +  @SerializedName("commandParams")
-   private Map<String, String> commandParams;
+   private Map<String, String> commandParams = new HashMap<String, String>();
 +
 +  @SerializedName("serviceName")
    private String serviceName;
 +
 +  @SerializedName("componentName")
    private String componentName;
  
    /**
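
The @SerializedName annotations added above pin the JSON key that Gson emits and accepts for each field, so the serialized form stays stable even if a Java field is later renamed. A small stand-alone sketch of that behavior (toy class and illustrative field names, not the real ExecutionCommand; assumes only gson on the classpath):

    import com.google.gson.Gson;
    import com.google.gson.annotations.SerializedName;

    public class SerializedNameDemo {
      // Toy command with two annotated fields; names are illustrative only.
      static class ToyCommand {
        @SerializedName("clusterName")
        private String clusterName = "c1";

        @SerializedName("taskId")
        private long taskId = 42;
      }

      public static void main(String[] args) {
        // Prints {"clusterName":"c1","taskId":42}; the annotation fixes the
        // JSON key regardless of the Java field name.
        System.out.println(new Gson().toJson(new ToyCommand()));
      }
    }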

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae8f1e77/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae8f1e77/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 5d904ef,c3b6a86..b181399
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@@ -1370,10 -1338,9 +1370,10 @@@ public class ClusterImpl implements Clu
    }
  
    @Override
-   public ServiceConfigVersionResponse addDesiredConfig(String user, Config config, String serviceConfigVersionNote) {
+   public ServiceConfigVersionResponse addDesiredConfig(String user, Set<Config> configs, String serviceConfigVersionNote) {
 -    if (null == user)
 +    if (null == user) {
        throw new NullPointerException("User must be specified.");
 +    }
  
      clusterGlobalLock.readLock().lock();
      try {
@@@ -1525,10 -1505,9 +1538,10 @@@
    }
  
    @Override
-   public boolean setServiceConfigVersion(String serviceName, Long version, String user, String note) throws AmbariException {
+   public ServiceConfigVersionResponse setServiceConfigVersion(String serviceName, Long version, String user, String note) throws AmbariException {
 -    if (null == user)
 +    if (null == user) {
        throw new NullPointerException("User must be specified.");
 +    }
  
      clusterGlobalLock.writeLock().lock();
      try {
@@@ -1644,9 -1653,18 +1687,18 @@@
      serviceConfigVersionResponse.setClusterName(getClusterName());
      serviceConfigVersionResponse.setServiceName(serviceConfigEntity.getServiceName());
      serviceConfigVersionResponse.setVersion(serviceConfigEntity.getVersion());
 -    serviceConfigVersionResponse.setCreateTime(serviceConfigEntity.getCreateTimestamp());    
 +    serviceConfigVersionResponse.setCreateTime(serviceConfigEntity.getCreateTimestamp());
      serviceConfigVersionResponse.setUserName(serviceConfigEntity.getUser());
      serviceConfigVersionResponse.setNote(serviceConfigEntity.getNote());
+     if (clusterConfigGroups != null) {
+       ConfigGroup configGroup = clusterConfigGroups.get(serviceConfigEntity.getGroupId());
+       if (configGroup != null) {
+         serviceConfigVersionResponse.setGroupId(configGroup.getId());
+         serviceConfigVersionResponse.setGroupName(configGroup.getName());
+       }
+     }
+ 
+ 
      return serviceConfigVersionResponse;
    }
  

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae8f1e77/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
index 0acd61b,0ac9da7..0884c50
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
@@@ -73,8 -73,6 +73,7 @@@ import org.apache.ambari.server.state.C
  import org.apache.ambari.server.state.Clusters;
  import org.apache.ambari.server.state.Config;
  import org.apache.ambari.server.state.ConfigHelper;
 +import org.apache.ambari.server.state.alert.Scope;
- import org.apache.ambari.server.view.configuration.InstanceConfig;
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae8f1e77/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae8f1e77/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae8f1e77/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae8f1e77/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae8f1e77/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae8f1e77/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae8f1e77/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae8f1e77/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae8f1e77/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
----------------------------------------------------------------------
diff --cc ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
index d7c8c54,5d85b0d..79ae350
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
@@@ -1045,9 -1042,9 +1046,9 @@@ public class ServiceComponentHostTest 
      config.setTag(tag);
      config.persist();
      cluster.addConfig(config);
-     cluster.addDesiredConfig("user", config);
+     cluster.addDesiredConfig("user", Collections.singleton(config));
    }
 -  
 +
    @Test
    public void testMaintenance() throws Exception {
      String stackVersion="HDP-2.0.6";
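
The test change above wraps a single Config in Collections.singleton to satisfy the widened Set<Config> parameter of addDesiredConfig. A tiny stand-alone sketch of that idiom, using a String in place of a Config object:

    import java.util.Collections;
    import java.util.Set;

    public class SingletonSetDemo {
      public static void main(String[] args) {
        // Collections.singleton wraps one element in an immutable one-element
        // Set, a convenient way to call an API widened from T to Set<T>.
        Set<String> configs = Collections.singleton("core-site");
        System.out.println(configs.size());   // 1
        // configs.add("hdfs-site");          // would throw UnsupportedOperationException
      }
    }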

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae8f1e77/ambari-server/src/test/java/org/apache/ambari/server/utils/TestStageUtils.java
----------------------------------------------------------------------
diff --cc ambari-server/src/test/java/org/apache/ambari/server/utils/TestStageUtils.java
index d3f0e37,988c67b..8b134c4
--- a/ambari-server/src/test/java/org/apache/ambari/server/utils/TestStageUtils.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/utils/TestStageUtils.java
@@@ -152,8 -147,8 +152,8 @@@ public class TestStageUtils 
    @Ignore
    public void testJasonToExecutionCommand() throws JsonGenerationException,
        JsonMappingException, JAXBException, IOException {
-     Stage s = StageUtils.getATestStage(1, 2, "host1", "clusterHostInfo");
+     Stage s = StageUtils.getATestStage(1, 2, "host1", "clusterHostInfo", "hostParamsStage");
 -    ExecutionCommand cmd = s.getExecutionCommands("host1").get(0).getExecutionCommand();    
 +    ExecutionCommand cmd = s.getExecutionCommands("host1").get(0).getExecutionCommand();
      HashMap<String, Map<String,String>> configTags = new HashMap<String, Map<String,String>>();
      Map<String, String> globalTag = new HashMap<String, String>();
      globalTag.put("tag", "version1");
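
Both this hunk and the TestStagePlanner changes earlier in the thread pass literal strings for the new clusterHostInfo and hostParams arguments of getATestStage. If that pattern keeps repeating, a convenience overload could centralize the defaults; the sketch below uses toy stand-in types rather than the real Stage and StageUtils:

    public class TestStageHelperSketch {
      // Toy stand-in for the real Stage type in ambari-server.
      static class Stage {
        final String hostname;
        final String clusterHostInfo;
        final String hostParams;

        Stage(String hostname, String clusterHostInfo, String hostParams) {
          this.hostname = hostname;
          this.clusterHostInfo = clusterHostInfo;
          this.hostParams = hostParams;
        }
      }

      // Mirrors the widened five-argument test helper.
      static Stage getATestStage(int requestId, int stageId, String hostname,
                                 String clusterHostInfo, String hostParams) {
        return new Stage(hostname, clusterHostInfo, hostParams);
      }

      // Convenience overload so call sites that don't care about the extra
      // arguments can keep passing three.
      static Stage getATestStage(int requestId, int stageId, String hostname) {
        return getATestStage(requestId, stageId, hostname, "", "");
      }

      public static void main(String[] args) {
        System.out.println(getATestStage(1, 2, "host1").hostname); // host1
      }
    }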


[26/35] git commit: AMBARI-7025. Config Groups: clean up for config history page (group name filter etc.) (xiwang)

Posted by jo...@apache.org.
AMBARI-7025. Config Groups: clean up for config history page (group name filter etc.) (xiwang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1ab3bb55
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1ab3bb55
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1ab3bb55

Branch: refs/heads/branch-alerts-dev
Commit: 1ab3bb551d3e122fe1a771b98aaffe0e52434f40
Parents: f8193e9
Author: Xi Wang <xi...@apache.org>
Authored: Tue Aug 26 15:17:56 2014 -0700
Committer: Xi Wang <xi...@apache.org>
Committed: Tue Aug 26 16:24:39 2014 -0700

----------------------------------------------------------------------
 .../main/dashboard/config_history_controller.js         |  2 +-
 ambari-web/app/messages.js                              |  1 +
 .../templates/common/configs/config_history_flow.hbs    |  2 +-
 .../app/templates/main/dashboard/config_history.hbs     |  5 +++--
 .../app/views/main/dashboard/config_history_view.js     | 12 +++++++-----
 5 files changed, 13 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1ab3bb55/ambari-web/app/controllers/main/dashboard/config_history_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/dashboard/config_history_controller.js b/ambari-web/app/controllers/main/dashboard/config_history_controller.js
index 2a81c28..002504a 100644
--- a/ambari-web/app/controllers/main/dashboard/config_history_controller.js
+++ b/ambari-web/app/controllers/main/dashboard/config_history_controller.js
@@ -41,7 +41,7 @@ App.MainConfigHistoryController = Em.ArrayController.extend(App.TableServerMixin
   colPropAssoc: function () {
     var associations = [];
     associations[1] = 'serviceVersion';
-    associations[2] = 'configGroupName';
+    associations[2] = 'configGroup';
     associations[3] = 'createTime';
     associations[4] = 'author';
     associations[5] = 'briefNotes';

http://git-wip-us.apache.org/repos/asf/ambari/blob/1ab3bb55/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index a02b729..31d3e69 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -1994,6 +1994,7 @@ Em.I18n.translations = {
   'dashboard.configHistory.table.empty' : 'No history to display',
   'dashboard.configHistory.table.version.versionText' : 'V{0}',
   'dashboard.configHistory.table.current.tooltip' : 'Current config for {0}:{1}',
+  'dashboard.configHistory.table.restart.tooltip' : 'Restart required',
   'dashboard.configHistory.table.filteredHostsInfo': '{0} of {1} versions showing',
   'dashboard.configHistory.info-bar.authoredOn': 'authored on',
   'dashboard.configHistory.info-bar.changesToHandle': 'Changes to handle',

http://git-wip-us.apache.org/repos/asf/ambari/blob/1ab3bb55/ambari-web/app/templates/common/configs/config_history_flow.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/configs/config_history_flow.hbs b/ambari-web/app/templates/common/configs/config_history_flow.hbs
index 9aade90..8fe54c8 100644
--- a/ambari-web/app/templates/common/configs/config_history_flow.hbs
+++ b/ambari-web/app/templates/common/configs/config_history_flow.hbs
@@ -93,7 +93,7 @@
           {{/unless}}
         </ul>
       </div>
-        <div class="label-wrapper span9" data-toggle="tooltip" {{bindAttr data-original-title="view.displayedServiceVersion.briefNotes" }}>
+        <div class="label-wrapper span9" data-toggle="tooltip" {{bindAttr data-original-title="view.displayedServiceVersion.briefNotes"}}>
           <span class="label label-info">{{view.displayedServiceVersion.versionText}}</span>
           {{#if view.displayedServiceVersion.isCurrent}}
             <span class="label label-success">{{t common.current}}</span>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1ab3bb55/ambari-web/app/templates/main/dashboard/config_history.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/dashboard/config_history.hbs b/ambari-web/app/templates/main/dashboard/config_history.hbs
index de8680a..f0101cc 100644
--- a/ambari-web/app/templates/main/dashboard/config_history.hbs
+++ b/ambari-web/app/templates/main/dashboard/config_history.hbs
@@ -45,11 +45,12 @@
               <a {{action goToServiceConfigs item.serviceName}}>
                 {{item.serviceName}}
               </a>
-              <i {{bindAttr class=":icon-refresh :restart-required-service item.isRestartRequired::hidden"}}></i>
+              <i {{bindAttr class=":icon-refresh :restart-required-service item.isRestartRequired::hidden"}}
+                rel="Tooltip" {{translateAttr data-original-title="dashboard.configHistory.table.restart.tooltip"}}></i>
             </td>
             <td>{{item.configGroupName}}
               {{#if item.isCurrent}}
-                <span class="label label-success" rel="currentTooltip"
+                <span class="label label-success" rel="Tooltip"
                 {{bindAttr data-original-title="item.currentTooltip"}}>{{t common.current}}
                 </span>
               {{/if}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/1ab3bb55/ambari-web/app/views/main/dashboard/config_history_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/config_history_view.js b/ambari-web/app/views/main/dashboard/config_history_view.js
index 72d37eb..feeae73 100644
--- a/ambari-web/app/views/main/dashboard/config_history_view.js
+++ b/ambari-web/app/views/main/dashboard/config_history_view.js
@@ -77,7 +77,6 @@ App.MainConfigHistoryView = App.TableView.extend({
     this.addObserver('displayLength', this, 'updatePagination');
     this.set('controller.isPolling', true);
     this.get('controller').doPolling();
-    //App.tooltip(this.$("[rel='currentTooltip']"));
   },
 
   /**
@@ -132,8 +131,12 @@ App.MainConfigHistoryView = App.TableView.extend({
     column: 2,
     fieldType: 'filter-input-width',
     content: function () {
-      return ['All'].concat(['g1','g2','gn']);
-    }.property('App.router.clusterController.isLoaded'),
+      var groupName = App.ServiceConfigVersion.find().mapProperty('groupName').uniq();
+      if (groupName.indexOf(null) > -1 ){
+        groupName.splice(groupName.indexOf(null), 1);
+      }
+      return ['All'].concat(groupName);
+    }.property('App.router.mainConfigHistoryController.content'),
     onChangeValue: function () {
       this.get('parentView').updateFilter(this.get('column'), this.get('actualValue'), 'select');
     },
@@ -187,7 +190,7 @@ App.MainConfigHistoryView = App.TableView.extend({
   ConfigVersionView: Em.View.extend({
     tagName: 'tr',
     didInsertElement: function(){
-      App.tooltip(this.$("[rel='currentTooltip']"));
+      App.tooltip(this.$("[rel='Tooltip']"));
     }
   }),
 
@@ -196,7 +199,6 @@ App.MainConfigHistoryView = App.TableView.extend({
    */
   refresh: function () {
     var self = this;
-
     this.set('filteringComplete', false);
     this.get('controller').load().done(function () {
       self.set('filteringComplete', true);


[17/35] git commit: AMBARI-7016 Tooltip for Final, Override and Remove buttons on Configs tab sometimes not present. (Max Shepel via atkach)

Posted by jo...@apache.org.
AMBARI-7016 Tooltip for Final, Override and Remove buttons on Configs tab sometimes not present. (Max Shepel via atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6d098ca8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6d098ca8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6d098ca8

Branch: refs/heads/branch-alerts-dev
Commit: 6d098ca8af8ba2244469a832b2ff4b1d64160760
Parents: 9faeaf5
Author: atkach <at...@hortonworks.com>
Authored: Tue Aug 26 18:54:34 2014 +0300
Committer: atkach <at...@hortonworks.com>
Committed: Tue Aug 26 18:54:34 2014 +0300

----------------------------------------------------------------------
 ambari-web/app/views/common/configs/overriddenProperty_view.js | 1 +
 ambari-web/app/views/common/configs/services_config.js         | 3 ++-
 2 files changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6d098ca8/ambari-web/app/views/common/configs/overriddenProperty_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/configs/overriddenProperty_view.js b/ambari-web/app/views/common/configs/overriddenProperty_view.js
index fc73474..079084e 100644
--- a/ambari-web/app/views/common/configs/overriddenProperty_view.js
+++ b/ambari-web/app/views/common/configs/overriddenProperty_view.js
@@ -25,6 +25,7 @@ App.ServiceConfigView.SCPOverriddenRowsView = Ember.View.extend({
   categoryConfigs: null, // just declared as viewClass need it
 
   didInsertElement: function (){
+    Em.$('body>.tooltip').remove();
     if (this.get('isDefaultGroupSelected')) {
       var overrides = this.get('serviceConfigProperty.overrides');
       overrides.forEach(function(overriddenSCP) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/6d098ca8/ambari-web/app/views/common/configs/services_config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/configs/services_config.js b/ambari-web/app/views/common/configs/services_config.js
index abdaacf..2fc787f 100644
--- a/ambari-web/app/views/common/configs/services_config.js
+++ b/ambari-web/app/views/common/configs/services_config.js
@@ -448,7 +448,8 @@ App.ServiceConfigsByCategoryView = Ember.View.extend(App.UserPref, {
     } else {
       this.$('.accordion-body').show();
     }
-    App.tooltip(this.$('[data-toggle=tooltip]'),{
+    $('body').tooltip({
+      selector: '[data-toggle=tooltip]',
       placement: 'top'
     });
     this.updateReadOnlyFlags();


[11/35] git commit: AMBARI-7012. HDP-artifacts tmp directory should not be used at all (aonishuk)

Posted by jo...@apache.org.
AMBARI-7012. HDP-artifacts tmp directory should not be used at all (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/90940781
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/90940781
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/90940781

Branch: refs/heads/branch-alerts-dev
Commit: 90940781212fbc8c87149bcad99d9292886d8d10
Parents: 8bed259
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Aug 26 15:34:16 2014 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Tue Aug 26 15:34:16 2014 +0300

----------------------------------------------------------------------
 .../main/resources/custom_actions/check_host.py |  7 +++--
 .../1.3.2/hooks/before-ANY/scripts/params.py    |  3 +-
 .../hooks/before-INSTALL/scripts/params.py      |  2 +-
 .../services/HIVE/package/scripts/params.py     |  2 +-
 .../2.0.6/hooks/before-ANY/scripts/params.py    |  3 +-
 .../hooks/before-INSTALL/scripts/params.py      |  2 +-
 .../services/HIVE/package/scripts/params.py     |  2 +-
 ambari-server/src/test/python/TestCheckHost.py  | 29 ++++++++++++--------
 .../stacks/1.3.2/HIVE/test_hive_metastore.py    |  4 +--
 .../stacks/1.3.2/HIVE/test_hive_server.py       |  4 +--
 .../1.3.2/hooks/before-ANY/test_before_any.py   |  6 ++--
 .../hooks/before-INSTALL/test_before_install.py |  4 +--
 .../stacks/2.0.6/HIVE/test_hive_metastore.py    |  4 +--
 .../stacks/2.0.6/HIVE/test_hive_server.py       |  4 +--
 .../2.0.6/hooks/before-ANY/test_before_any.py   |  6 ++--
 .../hooks/before-INSTALL/test_before_install.py |  4 +--
 .../stacks/2.1/HIVE/test_hive_metastore.py      |  4 +--
 17 files changed, 50 insertions(+), 40 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/90940781/ambari-server/src/main/resources/custom_actions/check_host.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/check_host.py b/ambari-server/src/main/resources/custom_actions/check_host.py
index 3543f96..8e9b222 100644
--- a/ambari-server/src/main/resources/custom_actions/check_host.py
+++ b/ambari-server/src/main/resources/custom_actions/check_host.py
@@ -46,6 +46,7 @@ JDBC_DRIVER_SYMLINK_POSTGRESQL = "postgres-jdbc-driver.jar"
 class CheckHost(Script):
   def actionexecute(self, env):
     config = Script.get_config()
+    tmp_dir = Script.get_tmp_dir()
 
     #print "CONFIG: " + str(config)
 
@@ -64,7 +65,7 @@ class CheckHost(Script):
 
     if CHECK_DB_CONNECTION in check_execute_list:
       try :
-        db_connection_check_structured_output = self.execute_db_connection_check(config)
+        db_connection_check_structured_output = self.execute_db_connection_check(config, tmp_dir)
         structured_output[CHECK_DB_CONNECTION] = db_connection_check_structured_output
       except Exception, exception:
         print "There was an unknown error while checking database connectivity: " + str(exception)
@@ -97,7 +98,7 @@ class CheckHost(Script):
     return java_home_check_structured_output
 
 
-  def execute_db_connection_check(self, config):
+  def execute_db_connection_check(self, config, tmp_dir):
     print "DB connection check started."
   
     # initialize needed data
@@ -135,7 +136,7 @@ class CheckHost(Script):
       return db_connection_check_structured_output
 
     environment = { "no_proxy": format("{ambari_server_hostname}") }
-    artifact_dir = "/tmp/HDP-artifacts/"
+    artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
     java_dir = os.path.dirname(java64_home)
 
     # download and install java if it doesn't exists

http://git-wip-us.apache.org/repos/asf/ambari/blob/90940781/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/params.py
index c7b01d6..0c6a415 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/params.py
@@ -20,11 +20,12 @@ limitations under the License.
 from resource_management import *
 
 config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
 
 _authentication = config['configurations']['core-site']['hadoop.security.authentication']
 security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 
-artifact_dir = "/tmp/HDP-artifacts/"
+artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
 jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
 jce_location = config['hostLevelParams']['jdk_location']
 jdk_name = default("/hostLevelParams/jdk_name", None)

http://git-wip-us.apache.org/repos/asf/ambari/blob/90940781/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
index ce8f34b..2bcbd50 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
@@ -25,7 +25,7 @@ config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
 #java params
-artifact_dir = "/tmp/HDP-artifacts/"
+artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
 jdk_name = default("/hostLevelParams/jdk_name", None) # None when jdk is already installed by user
 jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
 jce_location = config['hostLevelParams']['jdk_location']

http://git-wip-us.apache.org/repos/asf/ambari/blob/90940781/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
index ef26418..7ae8db4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
@@ -91,7 +91,7 @@ driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
 
 hdfs_user =  config['configurations']['hadoop-env']['hdfs_user']
 user_group = config['configurations']['hadoop-env']['user_group']
-artifact_dir = "/tmp/HDP-artifacts/"
+artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
 
 target = format("{hive_lib}/{jdbc_jar_name}")
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/90940781/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index c7b01d6..0c6a415 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -20,11 +20,12 @@ limitations under the License.
 from resource_management import *
 
 config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
 
 _authentication = config['configurations']['core-site']['hadoop.security.authentication']
 security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 
-artifact_dir = "/tmp/HDP-artifacts/"
+artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
 jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
 jce_location = config['hostLevelParams']['jdk_location']
 jdk_name = default("/hostLevelParams/jdk_name", None)

http://git-wip-us.apache.org/repos/asf/ambari/blob/90940781/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
index bb4ee71..f76cc33 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
@@ -100,7 +100,7 @@ security_enabled = ( not is_empty(_authentication) and _authentication == 'kerbe
 
 #java params
 java_home = config['hostLevelParams']['java_home']
-artifact_dir = "/tmp/HDP-artifacts/"
+artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
 jdk_name = default("/hostLevelParams/jdk_name", None) # None when jdk is already installed by user
 jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
 jce_location = config['hostLevelParams']['jdk_location']

http://git-wip-us.apache.org/repos/asf/ambari/blob/90940781/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
index 8a82717..6cd173b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
@@ -95,7 +95,7 @@ driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
 
 hdfs_user =  config['configurations']['hadoop-env']['hdfs_user']
 user_group = config['configurations']['hadoop-env']['user_group']
-artifact_dir = "/tmp/HDP-artifacts/"
+artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
 
 target = format("{hive_lib}/{jdbc_jar_name}")
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/90940781/ambari-server/src/test/python/TestCheckHost.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestCheckHost.py b/ambari-server/src/test/python/TestCheckHost.py
index c20f428..2f2a54a 100644
--- a/ambari-server/src/test/python/TestCheckHost.py
+++ b/ambari-server/src/test/python/TestCheckHost.py
@@ -32,10 +32,12 @@ class TestCheckHost(TestCase):
 
   @patch("os.path.isfile")
   @patch.object(Script, 'get_config')
+  @patch.object(Script, 'get_tmp_dir')
   @patch("resource_management.libraries.script.Script.put_structured_out")
-  def testJavaHomeAvailableCheck(self, structured_out_mock, mock_config, os_isfile_mock):
+  def testJavaHomeAvailableCheck(self, structured_out_mock, get_tmp_dir_mock, mock_config, os_isfile_mock):
     # test, java home exists
     os_isfile_mock.return_value = True
+    get_tmp_dir_mock.return_value = "/tmp"
     mock_config.return_value = {"commandParams" : {"check_execute_list" : "java_home_check",
                                                    "java_home" : "test_java_home"}}
 
@@ -57,12 +59,13 @@ class TestCheckHost(TestCase):
 
 
   @patch.object(Script, 'get_config')
+  @patch.object(Script, 'get_tmp_dir')
   @patch("check_host.Execute")
   @patch("resource_management.libraries.script.Script.put_structured_out")
   @patch("subprocess.Popen")
   @patch("check_host.format")
   @patch("os.path.isfile")
-  def testDBConnectionCheck(self, isfile_mock, format_mock, popenMock, structured_out_mock, execute_mock, mock_config):
+  def testDBConnectionCheck(self, isfile_mock, format_mock, popenMock, structured_out_mock, execute_mock, get_tmp_dir_mock, mock_config):
     # test, download DBConnectionVerification.jar failed
     mock_config.return_value = {"commandParams" : {"check_execute_list" : "db_connection_check",
                                                    "java_home" : "test_java_home",
@@ -73,7 +76,7 @@ class TestCheckHost(TestCase):
                                                    "user_name" : "test_user_name",
                                                    "user_passwd" : "test_user_passwd",
                                                    "jdk_name" : "test_jdk_name"}}
-
+    get_tmp_dir_mock.return_value = "/tmp"
     execute_mock.side_effect = Exception("test exception")
     isfile_mock.return_value = True
     checkHost = CheckHost()
@@ -83,10 +86,10 @@ class TestCheckHost(TestCase):
                      'DBConnectionVerification.jar from Ambari Server resources. Check network access to Ambari ' \
                      'Server.\ntest exception', 'exit_code': 1}})
     
-    self.assertEquals(format_mock.call_args_list[1][0][0], "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf " \
+    self.assertEquals(format_mock.call_args_list[2][0][0], "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf " \
                       "--retry 5 {jdk_location}{check_db_connection_jar_name} -o {check_db_connection_jar_name}'")
     
-    self.assertEquals(format_mock.call_args_list[2][0][0], "[ -f /usr/lib/ambari-agent/{check_db_connection_jar_name}]")
+    self.assertEquals(format_mock.call_args_list[3][0][0], "[ -f /usr/lib/ambari-agent/{check_db_connection_jar_name}]")
 
     # test, download jdbc driver failed
     mock_config.return_value = {"commandParams" : {"check_execute_list" : "db_connection_check",
@@ -111,10 +114,10 @@ class TestCheckHost(TestCase):
                   'Server host to make the JDBC driver available for download and to enable testing '
                   'the database connection.\n')
     self.assertEquals(structured_out_mock.call_args[0][0]['db_connection_check']['exit_code'], 1)
-    self.assertEquals(format_mock.call_args_list[3][0][0], "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf " \
+    self.assertEquals(format_mock.call_args_list[4][0][0], "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf " \
                                                             "--retry 5 {jdbc_url} -o {jdbc_name}'")
     
-    self.assertEquals(format_mock.call_args_list[4][0][0], "[ -f /usr/lib/ambari-agent/{jdbc_name}]")
+    self.assertEquals(format_mock.call_args_list[5][0][0], "[ -f /usr/lib/ambari-agent/{jdbc_name}]")
 
     # test, no connection to remote db
     mock_config.return_value = {"commandParams" : {"check_execute_list" : "db_connection_check",
@@ -172,8 +175,9 @@ class TestCheckHost(TestCase):
 
   @patch("socket.gethostbyname")
   @patch.object(Script, 'get_config')
+  @patch.object(Script, 'get_tmp_dir')
   @patch("resource_management.libraries.script.Script.put_structured_out")
-  def testHostResolution(self, structured_out_mock, mock_config, mock_socket):
+  def testHostResolution(self, structured_out_mock, get_tmp_dir_mock, mock_config, mock_socket):
     mock_socket.return_value = "192.168.1.1"    
     jsonFilePath = os.path.join("../resources/custom_actions", "check_host_ip_addresses.json")
     
@@ -181,7 +185,8 @@ class TestCheckHost(TestCase):
       jsonPayload = json.load(jsonFile)
  
     mock_config.return_value = ConfigDictionary(jsonPayload)
-    
+    get_tmp_dir_mock.return_value = "/tmp"
+
     checkHost = CheckHost()
     checkHost.actionexecute(None)
     
@@ -209,15 +214,17 @@ class TestCheckHost(TestCase):
        'failed_count': 5, 'success_count': 0, 'exit_code': 0}})
     
   @patch.object(Script, 'get_config')
+  @patch.object(Script, 'get_tmp_dir')
   @patch("resource_management.libraries.script.Script.put_structured_out")
-  def testInvalidCheck(self, structured_out_mock, mock_config):    
+  def testInvalidCheck(self, structured_out_mock, get_tmp_dir_mock, mock_config):
     jsonFilePath = os.path.join("../resources/custom_actions", "invalid_check.json")
     
     with open(jsonFilePath, "r") as jsonFile:
       jsonPayload = json.load(jsonFile)
  
     mock_config.return_value = ConfigDictionary(jsonPayload)
-    
+    get_tmp_dir_mock.return_value = "tmp"
+
     checkHost = CheckHost()
     checkHost.actionexecute(None)
     

http://git-wip-us.apache.org/repos/asf/ambari/blob/90940781/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_metastore.py
index 1fbe8ec..fc8392d 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_metastore.py
@@ -159,7 +159,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
-    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
       creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
       path = ['/bin', '/usr/bin/'],
       not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
@@ -218,7 +218,7 @@ class TestHiveMetastore(RMFTestCase):
     )
 
   def assert_configure_secured(self):
-    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
       creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
       path = ['/bin', '/usr/bin/'],
       not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',

http://git-wip-us.apache.org/repos/asf/ambari/blob/90940781/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py
index 24cb274..a92df36 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py
@@ -205,7 +205,7 @@ class TestHiveServer(RMFTestCase):
                               kinit_path_local = "/usr/bin/kinit",
                               action = ['create'],
                               )
-    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
       creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
       path = ['/bin', '/usr/bin/'],
       not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
@@ -292,7 +292,7 @@ class TestHiveServer(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               action = ['create'],
                               )
-    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
       creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
       path = ['/bin', '/usr/bin/'],
       not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',

http://git-wip-us.apache.org/repos/asf/ambari/blob/90940781/ambari-server/src/test/python/stacks/1.3.2/hooks/before-ANY/test_before_any.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/hooks/before-ANY/test_before_any.py b/ambari-server/src/test/python/stacks/1.3.2/hooks/before-ANY/test_before_any.py
index fe623a5..bdf0fa4 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/hooks/before-ANY/test_before_any.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/hooks/before-ANY/test_before_any.py
@@ -30,10 +30,10 @@ class TestHookBeforeInstall(RMFTestCase):
                        command="hook",
                        config_file="default.json"
     )
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/;     curl -kf -x "" --retry 10     http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
+    self.assertResourceCalled('Execute', 'mkdir -p /tmp/AMBARI-artifacts/;     curl -kf -x "" --retry 10     http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o /tmp/AMBARI-artifacts//UnlimitedJCEPolicyJDK7.zip',
         environment = {'no_proxy': 'c6401.ambari.apache.org'},
-        not_if = 'test -e /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
+        not_if = 'test -e /tmp/AMBARI-artifacts//UnlimitedJCEPolicyJDK7.zip',
         ignore_failures = True,
         path = ['/bin', '/usr/bin/'],
     )
-    self.assertNoMoreResources()
\ No newline at end of file
+    self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/90940781/ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py b/ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py
index 653b47c..25f85e9 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py
@@ -38,12 +38,12 @@ class TestHookBeforeInstall(RMFTestCase):
         repo_file_name='HDP',
         repo_template='repo_suse_rhel.j2'
     )
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/ ; curl -kf -x \"\" --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz',
+    self.assertResourceCalled('Execute', 'mkdir -p /tmp/AMBARI-artifacts/ ; curl -kf -x \"\" --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/AMBARI-artifacts//jdk-7u45-linux-x64.tar.gz',
         not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
         path = ['/bin', '/usr/bin/'],
         environment = {'no_proxy': 'c6401.ambari.apache.org'}
     )
-    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
+    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/AMBARI-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
         not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
         path = ['/bin', '/usr/bin/'],
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/90940781/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
index 653e764..a56c26b 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
@@ -144,7 +144,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
-    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
       creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
       path = ['/bin', '/usr/bin/'],
       not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
@@ -211,7 +211,7 @@ class TestHiveMetastore(RMFTestCase):
     )
 
   def assert_configure_secured(self):
-    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
       creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
       path = ['/bin', '/usr/bin/'],
       not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',

http://git-wip-us.apache.org/repos/asf/ambari/blob/90940781/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index 8eb6bd2..0350589 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -257,7 +257,7 @@ class TestHiveServer(RMFTestCase):
                               kinit_path_local = "/usr/bin/kinit",
                               action = ['create'],
                               )
-    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
       creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
       path = ['/bin', '/usr/bin/'],
       not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
@@ -353,7 +353,7 @@ class TestHiveServer(RMFTestCase):
                               action = ['create'],
                               )
 
-    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
       creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
       path = ['/bin', '/usr/bin/'],
       not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',

http://git-wip-us.apache.org/repos/asf/ambari/blob/90940781/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
index 3abfa57..d1c4e45 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
@@ -30,10 +30,10 @@ class TestHookBeforeInstall(RMFTestCase):
                        command="hook",
                        config_file="default.json"
     )
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/;     curl -kf -x "" --retry 10     http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
+    self.assertResourceCalled('Execute', 'mkdir -p /tmp/AMBARI-artifacts/;     curl -kf -x "" --retry 10     http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o /tmp/AMBARI-artifacts//UnlimitedJCEPolicyJDK7.zip',
         environment = {'no_proxy': 'c6401.ambari.apache.org'},
-        not_if = 'test -e /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
+        not_if = 'test -e /tmp/AMBARI-artifacts//UnlimitedJCEPolicyJDK7.zip',
         ignore_failures = True,
         path = ['/bin', '/usr/bin/'],
     )
-    self.assertNoMoreResources()
\ No newline at end of file
+    self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/90940781/ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py
index 9dfcff2..a26798e 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py
@@ -40,12 +40,12 @@ class TestHookBeforeInstall(RMFTestCase):
     )
     self.assertResourceCalled('Package', 'unzip',)
     self.assertResourceCalled('Package', 'curl',)
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/ ;   curl -kf -x \"\"   --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz',
+    self.assertResourceCalled('Execute', 'mkdir -p /tmp/AMBARI-artifacts/ ;   curl -kf -x \"\"   --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/AMBARI-artifacts//jdk-7u45-linux-x64.tar.gz',
         not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
         path = ['/bin', '/usr/bin/'],
         environment = {'no_proxy': 'c6401.ambari.apache.org'},
     )
-    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
+    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/AMBARI-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
         not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
         path = ['/bin', '/usr/bin/'],
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/90940781/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
index fd4ee43..aba747c 100644
--- a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
@@ -104,7 +104,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
-    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
       creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
       path = ['/bin', '/usr/bin/'],
       not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
@@ -174,7 +174,7 @@ class TestHiveMetastore(RMFTestCase):
     )
 
   def assert_configure_secured(self):
-    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/HDP-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
       creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
       path = ['/bin', '/usr/bin/'],
       not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',


[03/35] git commit: AMBARI-6993. links to mailing list archives from subscribe page. (yusaku)

Posted by jo...@apache.org.
AMBARI-6993. links to mailing list archives from subscribe page. (yusaku)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0c1bce05
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0c1bce05
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0c1bce05

Branch: refs/heads/branch-alerts-dev
Commit: 0c1bce053e99bc0de9bbda8ccabaa6c3d20e6c98
Parents: 2fd2500
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Mon Aug 25 11:59:41 2014 -0700
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Mon Aug 25 12:02:50 2014 -0700

----------------------------------------------------------------------
 docs/pom.xml | 18 +++++++++++++++---
 1 file changed, 15 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0c1bce05/docs/pom.xml
----------------------------------------------------------------------
diff --git a/docs/pom.xml b/docs/pom.xml
index 3ffc6da..e00a46d 100644
--- a/docs/pom.xml
+++ b/docs/pom.xml
@@ -74,7 +74,7 @@
             <unsubscribe>mailto:user-unsubscribe@ambari.apache.org
             </unsubscribe>
             <post>mailto:user@ambari.apache.org</post>
-            <archive></archive>
+            <archive>http://mail-archives.apache.org/mod_mbox/ambari-user/</archive>
         </mailingList>
         <mailingList>
             <name>Development list</name>
@@ -83,7 +83,7 @@
             <unsubscribe>mailto:dev-unsubscribe@ambari.apache.org
             </unsubscribe>
             <post>mailto:dev@ambari.apache.org</post>
-            <archive></archive>
+            <archive>http://mail-archives.apache.org/mod_mbox/ambari-dev/</archive>
         </mailingList>
         <mailingList>
             <name>Commit list</name>
@@ -92,7 +92,7 @@
             <unsubscribe>mailto:commits-unsubscribe@ambari.apache.org
             </unsubscribe>
             <post>mailto:commits@ambari.apache.org</post>
-            <archive></archive>
+            <archive>http://mail-archives.apache.org/mod_mbox/ambari-commits/</archive>
         </mailingList>
     </mailingLists>
 
@@ -311,6 +311,18 @@
             </organization>            
         </developer>
         <developer>
+            <id>jonathanhurley</id>
+            <name>Jonathan Hurley</name>
+            <email>jonathanhurley@apache.org</email>
+            <timezone>-8</timezone>
+            <roles>
+                <role>Committer</role>
+            </roles>
+            <organization>
+                Hortonworks
+            </organization>
+        </developer>
+        <developer>
             <id>jspeidel</id>
             <name>John Speidel</name>
             <email>jspeidel@apache.org</email>


[34/35] git commit: AMBARI-7034 Update UI unit tests for models. (atkach)

Posted by jo...@apache.org.
AMBARI-7034 Update UI unit tests for models. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a8263345
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a8263345
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a8263345

Branch: refs/heads/branch-alerts-dev
Commit: a82633457b56667238d1824b6d901b1a85b530ca
Parents: 1f48b24
Author: atkach <at...@hortonworks.com>
Authored: Wed Aug 27 15:01:58 2014 +0300
Committer: atkach <at...@hortonworks.com>
Committed: Wed Aug 27 15:01:58 2014 +0300

----------------------------------------------------------------------
 ambari-web/app/assets/test/tests.js             |   2 +
 ambari-web/app/models/host_component.js         |  28 +-
 ambari-web/app/models/stack_service.js          |   3 +-
 ambari-web/test/models/host_component_test.js   | 212 ++++++++++-
 ambari-web/test/models/host_test.js             | 357 ++++++++++++++++++-
 ambari-web/test/models/service_test.js          |  59 +--
 ambari-web/test/models/stack_service_test.js    | 343 ++++++++++++++++++
 .../common/configs/config_history_flow_test.js  |  22 +-
 8 files changed, 969 insertions(+), 57 deletions(-)
----------------------------------------------------------------------
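
Editorial note: most of the model specs added in this commit follow one recipe — load or build an Ember object, stub a collaborator with sinon, invalidate the computed property with propertyDidChange, then assert on the recomputed value or on the stub. A minimal, self-contained sketch of that pattern follows; the object, property and collaborator names are made up for illustration, and it assumes the Em, sinon, describe/it and expect globals that ambari-web's test runner already provides.

    describe('#fullName (pattern sketch)', function () {
      // Hypothetical stand-in; the real specs below exercise App.HostComponent,
      // App.Host, App.Service and App.StackService records.
      var person = Em.Object.extend({
        formatter: { join: function (a, b) { return a + ' ' + b; } },
        firstName: 'Jane',
        lastName: 'Doe',
        fullName: function () {
          return this.get('formatter').join(this.get('firstName'), this.get('lastName'));
        }.property('firstName', 'lastName')
      }).create();

      it('recomputes and delegates to the collaborator', function () {
        sinon.spy(person.get('formatter'), 'join');  // watch the collaborator
        person.set('firstName', 'John');
        person.propertyDidChange('fullName');        // redundant here since the dependency
                                                     // is declared, but mirrors the specs
        expect(person.get('fullName')).to.equal('John Doe');
        expect(person.get('formatter').join.calledWith('John', 'Doe')).to.be.true;
        person.get('formatter').join.restore();
      });
    });
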


http://git-wip-us.apache.org/repos/asf/ambari/blob/a8263345/ambari-web/app/assets/test/tests.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/test/tests.js b/ambari-web/app/assets/test/tests.js
index 985da16..8634d34 100644
--- a/ambari-web/app/assets/test/tests.js
+++ b/ambari-web/app/assets/test/tests.js
@@ -193,6 +193,8 @@ var files = ['test/init_model_test',
   'test/models/run_test',
   'test/models/service_config_test',
   'test/models/stack_service_component_test',
+  'test/models/service_test',
+  'test/models/stack_service_test',
   'test/models/user_test'
 ];
 App.initialize();

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8263345/ambari-web/app/models/host_component.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/host_component.js b/ambari-web/app/models/host_component.js
index b437d26..73fc852 100644
--- a/ambari-web/app/models/host_component.js
+++ b/ambari-web/app/models/host_component.js
@@ -32,7 +32,7 @@ App.HostComponent = DS.Model.extend({
    * Determine if component is client
    * @returns {bool}
    */
-  isClient:function () {
+  isClient: function () {
     return App.get('components.clients').contains(this.get('componentName'));
   }.property('componentName'),
   /**
@@ -40,7 +40,7 @@ App.HostComponent = DS.Model.extend({
    * Based on <code>workStatus</code>
    * @returns {bool}
    */
-  isRunning: function(){
+  isRunning: function () {
     return (this.get('workStatus') == 'STARTED' || this.get('workStatus') == 'STARTING');
   }.property('workStatus'),
 
@@ -64,17 +64,17 @@ App.HostComponent = DS.Model.extend({
    * Determine if component is slave
    * @returns {bool}
    */
-  isSlave: function(){
+  isSlave: function () {
     return App.get('components.slaves').contains(this.get('componentName'));
   }.property('componentName'),
   /**
    * Only certain components can be deleted.
-   * They include some from master components, 
-   * some from slave components, and rest from 
+   * They include some from master components,
+   * some from slave components, and rest from
    * client components.
    * @returns {bool}
    */
-  isDeletable: function() {
+  isDeletable: function () {
     return App.get('components.deletable').contains(this.get('componentName'));
   }.property('componentName'),
   /**
@@ -98,19 +98,19 @@ App.HostComponent = DS.Model.extend({
    * User friendly host component status
    * @returns {String}
    */
-  isActive: function() {
+  isActive: function () {
     return (this.get('passiveState') == 'OFF');
   }.property('passiveState'),
 
-  passiveTooltip: function() {
+  passiveTooltip: function () {
     if (!this.get('isActive')) {
       return Em.I18n.t('hosts.component.passive.mode');
     }
   }.property('isActive'),
 
-  statusClass: function() {
+  statusClass: function () {
     return this.get('isActive') ? this.get('workStatus') : 'icon-medkit';
-  }.property('workStatus','isActive'),
+  }.property('workStatus', 'isActive'),
 
   statusIconClass: function () {
     switch (this.get('statusClass')) {
@@ -133,7 +133,7 @@ App.HostComponent = DS.Model.extend({
 
   componentTextStatus: function () {
     return App.HostComponentStatus.getTextStatus(this.get("workStatus"));
-  }.property('workStatus','isDecommissioning')
+  }.property('workStatus', 'isDecommissioning')
 });
 
 App.HostComponent.FIXTURES = [];
@@ -155,8 +155,8 @@ App.HostComponentStatus = {
    * @param {String} value
    * @returns {String}
    */
-  getKeyName:function(value){
-    switch(value){
+  getKeyName: function (value) {
+    switch (value) {
       case this.started:
         return 'started';
       case this.starting:
@@ -213,7 +213,7 @@ App.HostComponentStatus = {
    * Get list of possible <code>App.HostComponent</code> statuses
    * @returns {String[]}
    */
-  getStatusesList: function() {
+  getStatusesList: function () {
     var ret = [];
     for (var st in this) {
       if (this.hasOwnProperty(st) && Em.typeOf(this[st]) == 'string') {

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8263345/ambari-web/app/models/stack_service.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/stack_service.js b/ambari-web/app/models/stack_service.js
index 2baf65a..7c69804 100644
--- a/ambari-web/app/models/stack_service.js
+++ b/ambari-web/app/models/stack_service.js
@@ -113,7 +113,7 @@ App.StackService = DS.Model.extend({
       serviceDependencyMap = App.StackService.dependency['HDP-1'];
     }
     for (key in serviceDependencyMap) {
-      if (serviceDependencyMap[key].contains(serviceName)) serviceDependencies.pushObject(key);
+      if (serviceDependencyMap[key].contains(serviceName)) serviceDependencies.push(key);
     }
     return  serviceDependencies;
   }.property('serviceName'),
@@ -178,7 +178,6 @@ App.StackService = DS.Model.extend({
    */
   configCategories: function () {
     var configCategories = [];
-    var serviceName = this.get('serviceName');
     var configTypes = this.get('configTypes');
     var serviceComponents = this.get('serviceComponents');
     if (configTypes && Object.keys(configTypes).length) {
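
Editorial note: two small production tweaks ride along with the test work in stack_service.js — serviceDependencies.pushObject(key) becomes serviceDependencies.push(key), and an unused local serviceName is removed from configCategories(). A plausible reading (not stated in the commit) is that pushObject, the observer-notifying Ember variant, is unnecessary for a local array that is built and returned inside a computed property, so plain push does the same job. Roughly:

    // Both calls append an element; pushObject additionally notifies Ember array
    // observers, which only matters for arrays that bindings or views watch.
    // (pushObject on a native array assumes Ember's prototype extensions.)
    var deps = [];
    deps.push('HDFS');
    deps.pushObject('YARN');
    console.log(deps); // ['HDFS', 'YARN']
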

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8263345/ambari-web/test/models/host_component_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/models/host_component_test.js b/ambari-web/test/models/host_component_test.js
index f9f14a0..8e7cedf 100644
--- a/ambari-web/test/models/host_component_test.js
+++ b/ambari-web/test/models/host_component_test.js
@@ -19,10 +19,24 @@
 var App = require('app');
 require('models/host_component');
 
-describe('App.HostComponentStatus', function() {
+describe('App.HostComponent', function() {
+
+  App.store.load(App.HostComponent, {
+    id: 'COMP_host',
+    component_name: 'COMP1'
+  });
+  var hc = App.HostComponent.find('COMP_host');
+
 
   describe('#getStatusesList', function() {
+    it('allowed statuses', function() {
+      var statuses = ["STARTED","STARTING","INSTALLED","STOPPING","INSTALL_FAILED","INSTALLING","UPGRADE_FAILED","UNKNOWN","DISABLED","INIT"];
+      expect(App.HostComponentStatus.getStatusesList()).to.include.members(statuses);
+      expect(statuses).to.include.members(App.HostComponentStatus.getStatusesList());
+    });
+  });
 
+  describe('#getStatusesList', function() {
     it('allowed statuses', function() {
       var statuses = ["STARTED","STARTING","INSTALLED","STOPPING","INSTALL_FAILED","INSTALLING","UPGRADE_FAILED","UNKNOWN","DISABLED","INIT"];
       expect(App.HostComponentStatus.getStatusesList()).to.include.members(statuses);
@@ -30,4 +44,200 @@ describe('App.HostComponentStatus', function() {
     });
   });
 
+  describe('#isClient', function() {
+    it('', function() {
+      sinon.stub(App.get('components.clients'), 'contains', Em.K);
+      hc.propertyDidChange('isClient');
+      hc.get('isClient');
+      expect(App.get('components.clients').contains.calledWith('COMP1')).to.be.true;
+      App.get('components.clients').contains.restore();
+    });
+  });
+
+  describe('#displayName', function() {
+    it('', function() {
+      sinon.stub(App.format, 'role', Em.K);
+      hc.propertyDidChange('displayName');
+      hc.get('displayName');
+      expect(App.format.role.calledWith('COMP1')).to.be.true;
+      App.format.role.restore();
+    });
+  });
+
+  describe('#isMaster', function() {
+    it('', function() {
+      sinon.stub(App.get('components.masters'), 'contains', Em.K);
+      hc.propertyDidChange('isMaster');
+      hc.get('isMaster');
+      expect(App.get('components.masters').contains.calledWith('COMP1')).to.be.true;
+      App.get('components.masters').contains.restore();
+    });
+  });
+
+  describe('#isSlave', function() {
+    it('', function() {
+      sinon.stub(App.get('components.slaves'), 'contains', Em.K);
+      hc.propertyDidChange('isSlave');
+      hc.get('isSlave');
+      expect(App.get('components.slaves').contains.calledWith('COMP1')).to.be.true;
+      App.get('components.slaves').contains.restore();
+    });
+  });
+
+  describe('#isDeletable', function() {
+    it('', function() {
+      sinon.stub(App.get('components.deletable'), 'contains', Em.K);
+      hc.propertyDidChange('isDeletable');
+      hc.get('isDeletable');
+      expect(App.get('components.deletable').contains.calledWith('COMP1')).to.be.true;
+      App.get('components.deletable').contains.restore();
+    });
+  });
+
+  describe('#isRunning', function() {
+    var testCases = [
+      {
+        workStatus: 'INSTALLED',
+        result: false
+      },
+      {
+        workStatus: 'STARTING',
+        result: true
+      },
+      {
+        workStatus: 'STARTED',
+        result: true
+      }
+    ];
+    testCases.forEach(function(test){
+      it('workStatus - ' + test.workStatus, function() {
+        hc.set('workStatus', test.workStatus);
+        hc.propertyDidChange('isRunning');
+        expect(hc.get('isRunning')).to.equal(test.result);
+      });
+    });
+  });
+
+  describe('#isDecommissioning', function() {
+    var mock = [];
+    beforeEach(function () {
+      sinon.stub(App.HDFSService, 'find', function () {
+        return mock;
+      })
+    });
+    afterEach(function () {
+      App.HDFSService.find.restore();
+    });
+    it('component name is not DATANODE', function() {
+      hc.propertyDidChange('isDecommissioning');
+      expect(hc.get('isDecommissioning')).to.be.false;
+    });
+    it('component name is DATANODE but no HDFS service', function() {
+      hc.set('componentName', 'DATANODE');
+      hc.propertyDidChange('isDecommissioning');
+      expect(hc.get('isDecommissioning')).to.be.false;
+    });
+    it('HDFS has no decommission DataNodes', function() {
+      hc.set('componentName', 'DATANODE');
+      mock.push(Em.Object.create({
+        decommissionDataNodes: []
+      }));
+      hc.propertyDidChange('isDecommissioning');
+      expect(hc.get('isDecommissioning')).to.be.false;
+    });
+    it('HDFS has decommission DataNodes', function() {
+      hc.set('componentName', 'DATANODE');
+      hc.set('hostName', 'host1');
+      mock.clear();
+      mock.push(Em.Object.create({
+        decommissionDataNodes: [{hostName: 'host1'}]
+      }));
+      hc.propertyDidChange('isDecommissioning');
+      expect(hc.get('isDecommissioning')).to.be.true;
+    });
+  });
+
+  describe('#isActive', function() {
+    it('passiveState is ON', function() {
+      hc.set('passiveState', "ON");
+      hc.propertyDidChange('isActive');
+      expect(hc.get('isActive')).to.be.false;
+    });
+    it('passiveState is OFF', function() {
+      hc.set('passiveState', "OFF");
+      hc.propertyDidChange('isActive');
+      expect(hc.get('isActive')).to.be.true;
+    });
+  });
+
+  describe('#statusClass', function() {
+    it('isActive is false', function() {
+      hc.reopen({
+        isActive: false
+      });
+      hc.propertyDidChange('statusClass');
+      expect(hc.get('statusClass')).to.equal('icon-medkit');
+    });
+    it('isActive is true', function() {
+      var status = 'INSTALLED';
+      hc.set('isActive', true);
+      hc.set('workStatus', status);
+      hc.propertyDidChange('statusClass');
+      expect(hc.get('statusClass')).to.equal(status);
+    });
+  });
+
+  describe('#statusIconClass', function () {
+    var testCases = [
+      {
+        statusClass: 'STARTED',
+        result: 'icon-ok-sign'
+      },
+      {
+        statusClass: 'STARTING',
+        result: 'icon-ok-sign'
+      },
+      {
+        statusClass: 'INSTALLED',
+        result: 'icon-warning-sign'
+      },
+      {
+        statusClass: 'STOPPING',
+        result: 'icon-warning-sign'
+      },
+      {
+        statusClass: 'UNKNOWN',
+        result: 'icon-question-sign'
+      },
+      {
+        statusClass: '',
+        result: ''
+      }
+    ];
+
+    it('reset statusClass to plain property', function () {
+      hc.reopen({
+        statusClass: ''
+      })
+    });
+    testCases.forEach(function (test) {
+      it('statusClass - ' + test.statusClass, function () {
+        hc.set('statusClass', test.statusClass);
+        hc.propertyDidChange('statusIconClass');
+        expect(hc.get('statusIconClass')).to.equal(test.result);
+      });
+    });
+  });
+
+  describe('#componentTextStatus', function() {
+    it('', function() {
+      var status = 'INSTALLED';
+      sinon.stub(App.HostComponentStatus, 'getTextStatus', Em.K);
+      hc.set('workStatus', status);
+      hc.propertyDidChange('componentTextStatus');
+      hc.get('componentTextStatus');
+      expect(App.HostComponentStatus.getTextStatus.calledWith(status)).to.be.true;
+      App.HostComponentStatus.getTextStatus.restore();
+    });
+  });
 });
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8263345/ambari-web/test/models/host_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/models/host_test.js b/ambari-web/test/models/host_test.js
index 26144d7..674496d 100644
--- a/ambari-web/test/models/host_test.js
+++ b/ambari-web/test/models/host_test.js
@@ -17,6 +17,7 @@
  */
 
 var App = require('app');
+var misc = require('utils/misc');
 
 require('models/host');
 
@@ -54,13 +55,17 @@ describe('App.Host', function () {
   before(function() {
     App.set('testMode', false);
   });
+  App.Host.reopen({
+    hostComponents: []
+  });
   App.store.loadMany(App.Host, data);
 
+  var host1 = App.Host.find('host1');
+
   describe('#diskUsedFormatted', function () {
 
     it('host1 - 10GB ', function () {
-      var host = App.Host.find().findProperty('hostName', 'host1');
-      expect(host.get('diskUsedFormatted')).to.equal('10GB');
+      expect(host1.get('diskUsedFormatted')).to.equal('10GB');
     });
     it('host2 - 0GB', function () {
       var host = App.Host.find().findProperty('hostName', 'host2');
@@ -75,8 +80,7 @@ describe('App.Host', function () {
   describe('#diskTotalFormatted', function () {
 
     it('host1 - 100.56GB ', function () {
-      var host = App.Host.find().findProperty('hostName', 'host1');
-      expect(host.get('diskTotalFormatted')).to.equal('100.56GB');
+      expect(host1.get('diskTotalFormatted')).to.equal('100.56GB');
     });
     it('host2 - 90GB', function () {
       var host = App.Host.find().findProperty('hostName', 'host2');
@@ -91,8 +95,7 @@ describe('App.Host', function () {
   describe('#diskUsageFormatted', function () {
 
     it('host1 - 9.94% ', function () {
-      var host = App.Host.find().findProperty('hostName', 'host1');
-      expect(host.get('diskUsageFormatted')).to.equal('9.94%');
+      expect(host1.get('diskUsageFormatted')).to.equal('9.94%');
     });
     it('host2 - 0%', function () {
       var host = App.Host.find().findProperty('hostName', 'host2');
@@ -115,4 +118,346 @@ describe('App.Host', function () {
     });
   });
 
+  describe('#cpuUsage', function () {
+    var testCases = [
+      {
+        params: {
+          cpuSystem: undefined,
+          cpuUser: undefined
+        },
+        result: 0
+      },
+      {
+        params: {
+          cpuSystem: 0,
+          cpuUser: 0
+        },
+        result: 0
+      },
+      {
+        params: {
+          cpuSystem: 1,
+          cpuUser: 0
+        },
+        result: 0
+      },
+      {
+        params: {
+          cpuSystem: 0,
+          cpuUser: 1
+        },
+        result: 0
+      },
+      {
+        params: {
+          cpuSystem: 1,
+          cpuUser: 1
+        },
+        result: 2
+      }
+    ];
+    testCases.forEach(function (test) {
+      it('cpuSystem - ' + test.params.cpuSystem + ', cpuUser - ' + test.params.cpuUser, function () {
+        host1.set('cpuSystem', test.params.cpuSystem);
+        host1.set('cpuUser', test.params.cpuUser);
+        host1.propertyDidChange('cpuUsage');
+
+        expect(host1.get('cpuUsage')).to.equal(test.result);
+      });
+    });
+  });
+
+  describe('#memoryUsage', function () {
+    var testCases = [
+      {
+        params: {
+          memFree: undefined,
+          memTotal: undefined
+        },
+        result: 0
+      },
+      {
+        params: {
+          memFree: 0,
+          memTotal: 0
+        },
+        result: 0
+      },
+      {
+        params: {
+          memFree: 1,
+          memTotal: 0
+        },
+        result: 0
+      },
+      {
+        params: {
+          memFree: 0,
+          memTotal: 1
+        },
+        result: 0
+      },
+      {
+        params: {
+          memFree: 1,
+          memTotal: 2
+        },
+        result: 50
+      }
+    ];
+    testCases.forEach(function (test) {
+      it('memFree - ' + test.params.memFree + ', memTotal - ' + test.params.memTotal, function () {
+        host1.set('memFree', test.params.memFree);
+        host1.set('memTotal', test.params.memTotal);
+        host1.propertyDidChange('memoryUsage');
+
+        expect(host1.get('memoryUsage')).to.equal(test.result);
+      });
+    });
+  });
+
+  describe('#componentsWithStaleConfigs', function () {
+    it('One component with stale configs', function () {
+      host1.set('hostComponents', [Em.Object.create({
+        staleConfigs: true
+      })]);
+      host1.propertyDidChange('componentsWithStaleConfigs');
+      expect(host1.get('componentsWithStaleConfigs')).to.eql([Em.Object.create({
+        staleConfigs: true
+      })]);
+    });
+    it('No components with stale configs', function () {
+      host1.set('hostComponents', [Em.Object.create({
+        staleConfigs: false
+      })]);
+      host1.propertyDidChange('componentsWithStaleConfigs');
+      expect(host1.get('componentsWithStaleConfigs')).to.be.empty;
+    });
+  });
+
+  describe('#componentsInPassiveStateCount', function () {
+    it('No component in passive state', function () {
+      host1.set('hostComponents', [Em.Object.create({
+        passiveState: 'OFF'
+      })]);
+      host1.propertyDidChange('componentsInPassiveStateCount');
+
+      expect(host1.get('componentsInPassiveStateCount')).to.equal(0);
+    });
+    it('One component in passive state', function () {
+      host1.set('hostComponents', [Em.Object.create({
+        passiveState: 'ON'
+      })]);
+      host1.propertyDidChange('componentsInPassiveStateCount');
+
+      expect(host1.get('componentsInPassiveStateCount')).to.equal(1);
+    });
+  });
+
+  describe('#disksMounted', function () {
+    it('', function () {
+      host1.set('diskInfo', [
+        {}
+      ]);
+      host1.propertyDidChange('disksMounted');
+      expect(host1.get('disksMounted')).to.equal(1);
+    });
+  });
+
+  describe('#coresFormatted', function () {
+    it('', function () {
+      host1.set('cpu', 1);
+      host1.set('cpuPhysical', 2);
+      host1.propertyDidChange('coresFormatted');
+      expect(host1.get('coresFormatted')).to.equal('1 (2)');
+    });
+  });
+
+  describe('#diskUsed', function () {
+    it('diskFree and diskTotal are 0', function () {
+      host1.set('diskFree', 0);
+      host1.set('diskTotal', 0);
+      host1.propertyDidChange('diskUsed');
+      expect(host1.get('diskUsed')).to.equal(0);
+    });
+    it('diskFree is 0 and diskTotal is 10', function () {
+      host1.set('diskFree', 0);
+      host1.set('diskTotal', 10);
+      host1.propertyDidChange('diskUsed');
+      expect(host1.get('diskUsed')).to.equal(10);
+    });
+  });
+
+  describe('#diskUsage', function () {
+    it('', function () {
+      host1.reopen({
+        diskUsed: 10
+      });
+      host1.set('diskTotal', 100);
+      host1.propertyDidChange('diskUsage');
+      expect(host1.get('diskUsage')).to.equal(10);
+    });
+  });
+
+  describe('#memoryFormatted', function () {
+    it('', function () {
+      host1.set('memory', 1024);
+      sinon.stub(misc, 'formatBandwidth', Em.K);
+      host1.propertyDidChange('memoryFormatted');
+      host1.get('memoryFormatted');
+      expect(misc.formatBandwidth.calledWith(1048576)).to.be.true;
+      misc.formatBandwidth.restore()
+    });
+  });
+
+  describe('#loadAvg', function () {
+    var testCases = [
+      {
+        params: {
+          loadOne: null,
+          loadFive: null,
+          loadFifteen: null
+        },
+        result: null
+      },
+      {
+        params: {
+          loadOne: 1.111,
+          loadFive: 5.555,
+          loadFifteen: 15.555
+        },
+        result: '1.11'
+      },
+      {
+        params: {
+          loadOne: null,
+          loadFive: 5.555,
+          loadFifteen: 15.555
+        },
+        result: '5.55'
+      },
+      {
+        params: {
+          loadOne: null,
+          loadFive: null,
+          loadFifteen: 15.555
+        },
+        result: '15.55'
+      }
+    ];
+
+    testCases.forEach(function (test) {
+      it('loadOne - ' + test.params.loadOne + ', loadFive - ' + test.params.loadFive + ', loadFifteen - ' + test.params.loadFifteen, function () {
+        host1.set('loadOne', test.params.loadOne);
+        host1.set('loadFive', test.params.loadFive);
+        host1.set('loadFifteen', test.params.loadFifteen);
+        host1.propertyDidChange('loadAvg');
+        expect(host1.get('loadAvg')).to.equal(test.result);
+      });
+    });
+  });
+
+  describe('#healthClass', function () {
+    var testCases = [
+      {
+        params: {
+          passiveState: 'ON',
+          healthStatus: null
+        },
+        result: 'icon-medkit'
+      },
+      {
+        params: {
+          passiveState: 'OFF',
+          healthStatus: 'UNKNOWN'
+        },
+        result: 'health-status-DEAD-YELLOW'
+      },
+      {
+        params: {
+          passiveState: 'OFF',
+          healthStatus: 'HEALTHY'
+        },
+        result: 'health-status-LIVE'
+      },
+      {
+        params: {
+          passiveState: 'OFF',
+          healthStatus: 'UNHEALTHY'
+        },
+        result: 'health-status-DEAD-RED'
+      },
+      {
+        params: {
+          passiveState: 'OFF',
+          healthStatus: 'ALERT'
+        },
+        result: 'health-status-DEAD-ORANGE'
+      },
+      {
+        params: {
+          passiveState: 'OFF',
+          healthStatus: null
+        },
+        result: 'health-status-DEAD-YELLOW'
+      }
+    ];
+
+    testCases.forEach(function (test) {
+      it('passiveState - ' + test.params.passiveState + ', healthStatus - ' + test.params.healthStatus, function () {
+        host1.set('passiveState', test.params.passiveState);
+        host1.set('healthStatus', test.params.healthStatus);
+        host1.propertyDidChange('healthClass');
+        expect(host1.get('healthClass')).to.equal(test.result);
+      });
+    });
+  });
+
+  describe('#healthIconClass', function () {
+    var testCases = [
+      {
+        params: {
+          healthClass: 'health-status-LIVE'
+        },
+        result: 'icon-ok-sign'
+      },
+      {
+        params: {
+          healthClass: 'health-status-DEAD-RED'
+        },
+        result: 'icon-warning-sign'
+      },
+      {
+        params: {
+          healthClass: 'health-status-DEAD-YELLOW'
+        },
+        result: 'icon-question-sign'
+      },
+      {
+        params: {
+          healthClass: 'health-status-DEAD-ORANGE'
+        },
+        result: 'icon-minus-sign'
+      },
+      {
+        params: {
+          healthClass: ''
+        },
+        result: ''
+      }
+    ];
+
+    it('reset healthClass to plain property', function(){
+      host1.reopen({
+        healthClass: ''
+      });
+    });
+    testCases.forEach(function (test) {
+      it('healthClass - ' + test.params.healthClass, function () {
+        host1.set('healthClass', test.params.healthClass);
+        host1.propertyDidChange('healthIconClass');
+        expect(host1.get('healthIconClass')).to.equal(test.result);
+      });
+    });
+  });
 });
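
Editorial note: the host specs above also lean on a table-driven style — an array of parameter/result pairs is looped with forEach, generating one it() per case. A tiny generic sketch of that idiom, using a hypothetical percentage helper rather than the real App.Host properties:

    // Hypothetical helper under test.
    function usagePercent(used, total) {
      return total > 0 ? Math.round(used / total * 100) : 0;
    }

    describe('usagePercent (table-driven sketch)', function () {
      var testCases = [
        { params: { used: 0,  total: 0   }, result: 0  },
        { params: { used: 10, total: 100 }, result: 10 },
        { params: { used: 1,  total: 2   }, result: 50 }
      ];
      testCases.forEach(function (test) {
        it('used - ' + test.params.used + ', total - ' + test.params.total, function () {
          expect(usagePercent(test.params.used, test.params.total)).to.equal(test.result);
        });
      });
    });
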

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8263345/ambari-web/test/models/service_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/models/service_test.js b/ambari-web/test/models/service_test.js
index 204badc..2467932 100644
--- a/ambari-web/test/models/service_test.js
+++ b/ambari-web/test/models/service_test.js
@@ -225,32 +225,6 @@ describe('App.Service', function () {
     });
   });
 
-  describe('#isClientsOnly', function () {
-    clientsOnly.forEach(function (item) {
-      it('should be true', function () {
-        service.set('serviceName', item);
-        expect(service.get('isClientsOnly')).to.be.true;
-      });
-    });
-    it('should be false', function () {
-      service.set('serviceName', 'HDFS');
-      expect(service.get('isClientsOnly')).to.be.false;
-    });
-  });
-
-  describe('#isConfigurable', function () {
-    configurable.forEach(function (item) {
-      it('should be true', function () {
-        service.set('serviceName', item);
-        expect(service.get('isConfigurable')).to.be.true;
-      });
-    });
-    it('should be false', function () {
-      service.set('serviceName', 'SQOOP');
-      expect(service.get('isConfigurable')).to.be.false;
-    });
-  });
-
   describe('#isRestartRequired', function () {
     hostComponentsDataFalse.forEach(function (item) {
       it('should be false', function () {
@@ -279,4 +253,37 @@ describe('App.Service', function () {
     });
   });
 
+  describe('#serviceTypes', function () {
+    var testCases = [
+      {
+        serviceName: 'PIG',
+        result: []
+      },
+      {
+        serviceName: 'GANGLIA',
+        result: ['MONITORING']
+      },
+      {
+        serviceName: 'NAGIOS',
+        result: ['MONITORING']
+      },
+      {
+        serviceName: 'HDFS',
+        result: ['HA_MODE']
+      },
+      {
+        serviceName: 'YARN',
+        result: ['HA_MODE']
+      }
+    ];
+    testCases.forEach(function (test) {
+      it('service name - ' + test.serviceName, function () {
+        service.set('serviceName', test.serviceName);
+        service.propertyDidChange('serviceTypes');
+        expect(service.get('serviceTypes')).to.eql(test.result);
+      });
+    });
+  });
+
+
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8263345/ambari-web/test/models/stack_service_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/models/stack_service_test.js b/ambari-web/test/models/stack_service_test.js
new file mode 100644
index 0000000..07b4d66
--- /dev/null
+++ b/ambari-web/test/models/stack_service_test.js
@@ -0,0 +1,343 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var App = require('app');
+
+require('models/stack_service');
+
+describe('App.StackService', function () {
+
+  App.store.load(App.StackService, {
+    id: 'S1'
+  });
+
+  var ss = App.StackService.find('S1');
+  ss.reopen({
+    serviceComponents: []
+  });
+
+  describe('#isDFS', function () {
+    it('service name is "SERVICE"', function () {
+      ss.set('serviceName', 'SERVICE');
+      ss.propertyDidChange('isDFS');
+      expect(ss.get('isDFS')).to.be.false;
+    });
+    it('service name is "HDFS"', function () {
+      ss.set('serviceName', 'HDFS');
+      ss.propertyDidChange('isDFS');
+      expect(ss.get('isDFS')).to.be.true;
+    });
+    it('service name is "GLUSTERFS"', function () {
+      ss.set('serviceName', 'GLUSTERFS');
+      ss.propertyDidChange('isDFS');
+      expect(ss.get('isDFS')).to.be.true;
+    });
+  });
+
+  describe('#isPrimaryDFS', function () {
+    it('service name is "SERVICE"', function () {
+      ss.set('serviceName', 'SERVICE');
+      ss.propertyDidChange('isPrimaryDFS');
+      expect(ss.get('isPrimaryDFS')).to.be.false;
+    });
+    it('service name is "HDFS"', function () {
+      ss.set('serviceName', 'HDFS');
+      ss.propertyDidChange('isPrimaryDFS');
+      expect(ss.get('isPrimaryDFS')).to.be.true;
+    });
+  });
+
+  describe('#configTypesRendered', function () {
+    ss.set('configTypes', {
+      'core-site': {},
+      'hdfs-site': {}
+    });
+    it('service name is "SERVICE"', function () {
+      ss.set('serviceName', 'SERVICE');
+      ss.propertyDidChange('configTypesRendered');
+      expect(ss.get('configTypesRendered')).to.eql({'hdfs-site': {}});
+    });
+    it('service name is "GLUSTERFS"', function () {
+      ss.set('serviceName', 'GLUSTERFS');
+      ss.propertyDidChange('configTypesRendered');
+      expect(ss.get('configTypesRendered')).to.eql({'core-site': {}, 'hdfs-site': {}});
+    });
+    it('service name is "HDFS"', function () {
+      ss.set('serviceName', 'HDFS');
+      ss.propertyDidChange('configTypesRendered');
+      expect(ss.get('configTypesRendered')).to.eql({'core-site': {}, 'hdfs-site': {}});
+    });
+  });
+
+  describe('#displayNameOnSelectServicePage', function () {
+    it('No coSelectedServices', function () {
+      ss.set('serviceName', 'HDFS');
+      ss.set('displayName', 'HDFS');
+      ss.propertyDidChange('displayNameOnSelectServicePage');
+      expect(ss.get('displayNameOnSelectServicePage')).to.equal('HDFS');
+    });
+    it('Present coSelectedServices', function () {
+      ss.set('serviceName', 'YARN');
+      ss.set('displayName', 'YARN');
+      ss.propertyDidChange('displayNameOnSelectServicePage');
+      expect(ss.get('displayNameOnSelectServicePage')).to.equal('YARN + MapReduce2');
+    });
+  });
+
+  describe('#isHiddenOnSelectServicePage', function () {
+    var testCases = [
+      {
+        serviceName: 'HDFS',
+        result: false
+      },
+      {
+        serviceName: 'MAPREDUCE2',
+        result: true
+      },
+      {
+        serviceName: 'HCATALOG',
+        result: true
+      },
+      {
+        serviceName: 'WEBHCAT',
+        result: true
+      }
+    ];
+
+    testCases.forEach(function (test) {
+      it('service name - ' + test.serviceName, function () {
+        ss.set('serviceName', test.serviceName);
+        ss.propertyDidChange('isHiddenOnSelectServicePage');
+        expect(ss.get('isHiddenOnSelectServicePage')).to.equal(test.result);
+      });
+    });
+  });
+
+  describe('#dependentServices', function () {
+    var mock = {
+      isHadoop2Stack: false
+    };
+    beforeEach(function () {
+      sinon.stub(App, 'get', function () {
+        return mock.isHadoop2Stack;
+      })
+    });
+    afterEach(function () {
+      App.get.restore();
+    });
+    it('isHadoop2Stack is false', function () {
+      ss.set('serviceName', 'HDFS');
+      ss.propertyDidChange('dependentServices');
+      expect(ss.get('dependentServices')).to.eql(['MAPREDUCE', 'HBASE', 'SQOOP']);
+    });
+    it('isHadoop2Stack is true', function () {
+      mock.isHadoop2Stack = true;
+      ss.set('serviceName', 'HDFS');
+      ss.propertyDidChange('dependentServices');
+      expect(ss.get('dependentServices')).to.eql(['YARN', 'HBASE', 'FLUME', 'SQOOP']);
+    });
+  });
+
+  describe('#serviceDependency', function () {
+    var mock = {
+      isHadoop2Stack: false
+    };
+    beforeEach(function () {
+      sinon.stub(App, 'get', function () {
+        return mock.isHadoop2Stack;
+      })
+    });
+    afterEach(function () {
+      App.get.restore();
+    });
+    it('isHadoop2Stack is false', function () {
+      ss.set('serviceName', 'HDFS');
+      ss.propertyDidChange('serviceDependency');
+      expect(ss.get('serviceDependency')).to.eql([]);
+    });
+    it('isHadoop2Stack is true', function () {
+      mock.isHadoop2Stack = true;
+      ss.set('serviceName', 'HDFS');
+      ss.propertyDidChange('serviceDependency');
+      expect(ss.get('serviceDependency')).to.eql(["ZOOKEEPER"]);
+    });
+  });
+
+  describe('#isMonitoringService', function () {
+    var testCases = [
+      {
+        serviceName: 'HDFS',
+        result: false
+      },
+      {
+        serviceName: 'NAGIOS',
+        result: true
+      },
+      {
+        serviceName: 'GANGLIA',
+        result: true
+      }
+    ];
+
+    testCases.forEach(function (test) {
+      it('service name - ' + test.serviceName, function () {
+        ss.set('serviceName', test.serviceName);
+        ss.propertyDidChange('isMonitoringService');
+        expect(ss.get('isMonitoringService')).to.equal(test.result);
+      });
+    });
+  });
+
+  describe('#hasClient', function () {
+    it('No client serviceComponents', function () {
+      ss.set('serviceComponents', []);
+      ss.propertyDidChange('hasClient');
+      expect(ss.get('hasClient')).to.be.false;
+    });
+    it('Has client serviceComponents', function () {
+      ss.set('serviceComponents', [Em.Object.create({isClient: true})]);
+      ss.propertyDidChange('hasClient');
+      expect(ss.get('hasClient')).to.be.true;
+    });
+  });
+
+  describe('#hasMaster', function () {
+    it('No master serviceComponents', function () {
+      ss.set('serviceComponents', []);
+      ss.propertyDidChange('hasMaster');
+      expect(ss.get('hasMaster')).to.be.false;
+    });
+    it('Has master serviceComponents', function () {
+      ss.set('serviceComponents', [Em.Object.create({isMaster: true})]);
+      ss.propertyDidChange('hasMaster');
+      expect(ss.get('hasMaster')).to.be.true;
+    });
+  });
+
+  describe('#hasSlave', function () {
+    it('No slave serviceComponents', function () {
+      ss.set('serviceComponents', []);
+      ss.propertyDidChange('hasSlave');
+      expect(ss.get('hasSlave')).to.be.false;
+    });
+    it('Has slave serviceComponents', function () {
+      ss.set('serviceComponents', [Em.Object.create({isSlave: true})]);
+      ss.propertyDidChange('hasSlave');
+      expect(ss.get('hasSlave')).to.be.true;
+    });
+  });
+
+  describe('#isClientOnlyService', function () {
+    it('Has not only client serviceComponents', function () {
+      ss.set('serviceComponents', [Em.Object.create({isSlave: true}), Em.Object.create({isClient: true})]);
+      ss.propertyDidChange('isClientOnlyService');
+      expect(ss.get('isClientOnlyService')).to.be.false;
+    });
+    it('Has only client serviceComponents', function () {
+      ss.set('serviceComponents', [Em.Object.create({isClient: true})]);
+      ss.propertyDidChange('isClientOnlyService');
+      expect(ss.get('isClientOnlyService')).to.be.true;
+    });
+  });
+
+  describe('#isNoConfigTypes', function () {
+    it('configTypes is null', function () {
+      ss.set('configTypes', null);
+      ss.propertyDidChange('isNoConfigTypes');
+      expect(ss.get('isNoConfigTypes')).to.be.true;
+    });
+    it('configTypes is empty', function () {
+      ss.set('configTypes', {});
+      ss.propertyDidChange('isNoConfigTypes');
+      expect(ss.get('isNoConfigTypes')).to.be.true;
+    });
+    it('configTypes is correct', function () {
+      ss.set('configTypes', {'key': {}});
+      ss.propertyDidChange('isNoConfigTypes');
+      expect(ss.get('isNoConfigTypes')).to.be.false;
+    });
+  });
+
+  describe('#customReviewHandler', function () {
+    it('service name is HDFS', function () {
+      ss.set('serviceName', 'HDFS');
+      ss.propertyDidChange('customReviewHandler');
+      expect(ss.get('customReviewHandler')).to.be.undefined;
+    });
+    it('service name is HIVE', function () {
+      ss.set('serviceName', 'HIVE');
+      ss.propertyDidChange('customReviewHandler');
+      expect(ss.get('customReviewHandler')).to.eql({
+        "Database": "loadHiveDbValue"
+      });
+    });
+  });
+
+  describe('#defaultsProviders', function () {
+    it('service name is HDFS', function () {
+      ss.set('serviceName', 'HDFS');
+      ss.propertyDidChange('defaultsProviders');
+      expect(ss.get('defaultsProviders')).to.be.undefined;
+    });
+    it('service name is HIVE', function () {
+      ss.set('serviceName', 'HIVE');
+      ss.propertyDidChange('defaultsProviders');
+      expect(ss.get('defaultsProviders')).to.not.be.empty;
+    });
+  });
+
+  describe('#configsValidator', function () {
+    it('service name is HDFS', function () {
+      ss.set('serviceName', 'HDFS');
+      ss.propertyDidChange('configsValidator');
+      expect(ss.get('configsValidator')).to.be.undefined;
+    });
+    it('service name is HIVE', function () {
+      ss.set('serviceName', 'HIVE');
+      ss.propertyDidChange('configsValidator');
+      expect(ss.get('configsValidator')).to.not.be.empty;
+    });
+  });
+
+  describe('#configCategories', function () {
+    it('HDFS service with no serviceComponents', function () {
+      ss.set('serviceComponents', []);
+      ss.set('serviceName', 'HDFS');
+      ss.propertyDidChange('configCategories');
+      expect(ss.get('configCategories').mapProperty('name')).to.eql([
+        "General",
+        "Advanced",
+        "Advanced key",
+        "Custom key"
+      ]);
+    });
+    it('HDFS service with DATANODE serviceComponents', function () {
+      ss.set('serviceComponents', [Em.Object.create({componentName: 'DATANODE'})]);
+      ss.set('serviceName', 'HDFS');
+      ss.propertyDidChange('configCategories');
+      expect(ss.get('configCategories').mapProperty('name')).to.eql([
+        "DATANODE",
+        "General",
+        "Advanced",
+        "Advanced key",
+        "Custom key"]);
+    });
+  });
+
+
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8263345/ambari-web/test/views/common/configs/config_history_flow_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/common/configs/config_history_flow_test.js b/ambari-web/test/views/common/configs/config_history_flow_test.js
index 3317550..59d38e2 100644
--- a/ambari-web/test/views/common/configs/config_history_flow_test.js
+++ b/ambari-web/test/views/common/configs/config_history_flow_test.js
@@ -512,7 +512,7 @@ describe('App.ConfigHistoryFlowView', function () {
       sinon.spy(view.get('controller'), 'onConfigGroupChange');
       view.compare({context: Em.Object.create({version: 1})});
 
-      expect(view.get('controller.compareServiceVersion.version')).to.equal(1);
+      expect(view.get('controller.compareServiceVersion')).to.eql(Em.Object.create({version: 1}));
       expect(view.get('controller').onConfigGroupChange.calledOnce).to.be.true;
       view.get('controller').onConfigGroupChange.restore();
     });
@@ -520,13 +520,16 @@ describe('App.ConfigHistoryFlowView', function () {
 
   describe('#revert()', function () {
     beforeEach(function () {
-      sinon.stub(App, 'showConfirmationPopup', function (callback) {
-        callback();
+      sinon.stub(App.ModalPopup, 'show', function (options) {
+        options.onPrimary.call(Em.Object.create({
+          serviceConfigNote: 'note',
+          hide: Em.K
+        }));
       });
       sinon.stub(view, 'sendRevertCall', Em.K);
     });
     afterEach(function () {
-      App.showConfirmationPopup.restore();
+      App.ModalPopup.show.restore();
       view.sendRevertCall.restore();
     });
     it('context passed', function () {
@@ -535,10 +538,11 @@ describe('App.ConfigHistoryFlowView', function () {
         serviceName: 'S1'
       })});
 
-      expect(App.showConfirmationPopup.calledOnce).to.be.true;
+      expect(App.ModalPopup.show.calledOnce).to.be.true;
       expect(view.sendRevertCall.calledWith(Em.Object.create({
         version: 1,
-        serviceName: 'S1'
+        serviceName: 'S1',
+        serviceConfigNote: 'note'
       }))).to.be.true;
     });
     it('context is not passed', function () {
@@ -548,10 +552,12 @@ describe('App.ConfigHistoryFlowView', function () {
       }));
       view.revert({});
 
-      expect(App.showConfirmationPopup.calledOnce).to.be.true;
+      expect(App.ModalPopup.show.calledOnce).to.be.true;
       expect(view.sendRevertCall.calledWith(Em.Object.create({
         version: 1,
-        serviceName: 'S1'
+        serviceName: 'S1',
+        serviceConfigNote: 'note',
+        notes: ''
       }))).to.be.true;
     });
   });


[09/35] git commit: AMBARI-6986. Upgrade to 1.7.0, ambari+mysql fails on "version" column. (vbrodetskyi)

Posted by jo...@apache.org.
AMBARI-6986. Upgrade to 1.7.0, ambari+mysql fails on "version" column. (vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/160abc54
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/160abc54
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/160abc54

Branch: refs/heads/branch-alerts-dev
Commit: 160abc549c03cae1f34511582bc8a1cda467157b
Parents: a80066a
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Tue Aug 26 13:49:22 2014 +0300
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Tue Aug 26 13:49:22 2014 +0300

----------------------------------------------------------------------
 .../java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/160abc54/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
index eeba932..30059ac 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
@@ -437,7 +437,7 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
   private void renameSequenceValueColumnName() throws AmbariException, SQLException {
     final String dbType = getDbType();
     if (Configuration.MYSQL_DB_NAME.equals(dbType)) {
-      dbAccessor.executeQuery("ALTER TABLE ambari_sequences RENAME COLUMN \"value\" to sequence_value DECIMAL(38) NOT NULL");
+      dbAccessor.executeQuery("ALTER TABLE ambari_sequences RENAME COLUMN value to sequence_value DECIMAL(38) NOT NULL");
     } else if (Configuration.DERBY_DB_NAME.equals(dbType)) {
       dbAccessor.executeQuery("RENAME COLUMN ambari_sequences.\"value\" to sequence_value");
     } else if (Configuration.ORACLE_DB_NAME.equals(dbType)) {
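
The MySQL branch drops the double quotes around the column name because, in its default SQL mode, MySQL parses a double-quoted token as a string literal rather than an identifier (only the ANSI_QUOTES mode changes that), while the other dialect branches keep the ANSI-quoted form. A minimal standalone sketch of that dialect split follows; the class name, connection URL and query are illustrative, not Ambari code:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class QuotedIdentifierSketch {

  // Dialect-appropriate way to reference a column named "value".
  static String valueColumn(String dbType) {
    if ("mysql".equals(dbType)) {
      return "`value`";      // backticks (or no quoting at all); a double-quoted "value" would be read as a string literal
    }
    return "\"value\"";      // ANSI identifier quoting, as kept for Derby and Oracle above
  }

  public static void main(String[] args) throws SQLException {
    String jdbcUrl = args[0];                       // e.g. a MySQL or Derby JDBC URL
    String dbType  = args.length > 1 ? args[1] : "mysql";
    try (Connection conn = DriverManager.getConnection(jdbcUrl);
         Statement stmt = conn.createStatement()) {
      stmt.execute("SELECT " + valueColumn(dbType) + " FROM ambari_sequences");
    }
  }
}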


[20/35] git commit: AMBARI-6957. Fixes NPE during cluster create with blueprint.

Posted by jo...@apache.org.
AMBARI-6957. Fixes NPE during cluster create with blueprint.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6237724d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6237724d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6237724d

Branch: refs/heads/branch-alerts-dev
Commit: 6237724d4f5cb02bd1541deba6542a72be04af2a
Parents: 2cceee2
Author: Robert Nettleton <rn...@hortonworks.com>
Authored: Tue Aug 26 14:04:53 2014 -0400
Committer: John Speidel <js...@hortonworks.com>
Committed: Tue Aug 26 14:04:53 2014 -0400

----------------------------------------------------------------------
 .../internal/BaseBlueprintProcessor.java        |  37 +-
 .../BlueprintConfigurationProcessor.java        |   5 +-
 .../internal/ClusterResourceProvider.java       |  73 ++-
 .../internal/ClusterResourceProviderTest.java   | 497 ++++++++++++++++++-
 4 files changed, 584 insertions(+), 28 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6237724d/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java
index a4165d7..c9f0124 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java
@@ -106,7 +106,7 @@ public abstract class BaseBlueprintProcessor extends AbstractControllerResourceP
     Map<String, HostGroupImpl> mapHostGroups = new HashMap<String, HostGroupImpl>();
 
     for (HostGroupEntity hostGroup : blueprint.getHostGroups()) {
-      mapHostGroups.put(hostGroup.getName(), new HostGroupImpl(hostGroup, stack));
+      mapHostGroups.put(hostGroup.getName(), new HostGroupImpl(hostGroup, stack, this));
     }
     return mapHostGroups;
   }
@@ -123,7 +123,7 @@ public abstract class BaseBlueprintProcessor extends AbstractControllerResourceP
   protected Stack parseStack(BlueprintEntity blueprint) throws SystemException {
     Stack stack;
     try {
-      stack = new Stack(blueprint.getStackName(), blueprint.getStackVersion());
+      stack = new Stack(blueprint.getStackName(), blueprint.getStackVersion(), getManagementController());
     } catch (StackAccessException e) {
       throw new IllegalArgumentException("Invalid stack information provided for cluster.  " +
           "stack name: " + blueprint.getStackName() +
@@ -148,7 +148,7 @@ public abstract class BaseBlueprintProcessor extends AbstractControllerResourceP
    * @throws IllegalArgumentException when validation fails
    */
   protected BlueprintEntity validateTopology(BlueprintEntity blueprint) throws AmbariException {
-    Stack stack = new Stack(blueprint.getStackName(), blueprint.getStackVersion());
+    Stack stack = new Stack(blueprint.getStackName(), blueprint.getStackVersion(), getManagementController());
     Map<String, HostGroupImpl> hostGroupMap = parseBlueprintHostGroups(blueprint, stack);
     Collection<HostGroupImpl> hostGroups = hostGroupMap.values();
     Map<String, Map<String, String>> clusterConfig = processBlueprintConfigurations(blueprint, null);
@@ -455,7 +455,7 @@ public abstract class BaseBlueprintProcessor extends AbstractControllerResourceP
   /**
    * Encapsulates stack information.
    */
-  protected class Stack {
+  protected static class Stack {
     /**
      * Stack name
      */
@@ -514,6 +514,12 @@ public abstract class BaseBlueprintProcessor extends AbstractControllerResourceP
     private Map<String, Map<String, Map<String, ConfigProperty>>> serviceConfigurations =
         new HashMap<String, Map<String, Map<String, ConfigProperty>>>();
 
+
+    /**
+     * Ambari Management Controller, used to obtain Stack definitions
+     */
+    private final AmbariManagementController ambariManagementController;
+
     /**
      * Contains a configuration property's value and attributes.
      */
@@ -553,11 +559,12 @@ public abstract class BaseBlueprintProcessor extends AbstractControllerResourceP
      * @throws AmbariException an exception occurred getting stack information
      *                         for the specified name and version
      */
-    public Stack(String name, String version) throws AmbariException {
+    public Stack(String name, String version, AmbariManagementController ambariManagementController) throws AmbariException {
       this.name = name;
       this.version = version;
+      this.ambariManagementController = ambariManagementController;
 
-      Set<StackServiceResponse> stackServices = getManagementController().getStackServices(
+      Set<StackServiceResponse> stackServices = ambariManagementController.getStackServices(
           Collections.singleton(new StackServiceRequest(name, version, null)));
 
       for (StackServiceResponse stackService : stackServices) {
@@ -767,7 +774,7 @@ public abstract class BaseBlueprintProcessor extends AbstractControllerResourceP
     private void parseComponents(String service) throws AmbariException{
       Collection<String> componentSet = new HashSet<String>();
 
-      Set<StackServiceComponentResponse> components = getManagementController().getStackComponents(
+      Set<StackServiceComponentResponse> components = ambariManagementController.getStackComponents(
           Collections.singleton(new StackServiceComponentRequest(name, version, service, null)));
 
       // stack service components
@@ -807,7 +814,7 @@ public abstract class BaseBlueprintProcessor extends AbstractControllerResourceP
 
       serviceConfigurations.put(service, mapServiceConfig);
 
-      Set<StackConfigurationResponse> serviceConfigs = getManagementController().getStackConfigurations(
+      Set<StackConfigurationResponse> serviceConfigs = ambariManagementController.getStackConfigurations(
           Collections.singleton(new StackConfigurationRequest(name, version, service, null)));
 
       for (StackConfigurationResponse config : serviceConfigs) {
@@ -846,7 +853,7 @@ public abstract class BaseBlueprintProcessor extends AbstractControllerResourceP
   /**
    * Host group representation.
    */
-  protected class HostGroupImpl implements HostGroup {
+  protected static class HostGroupImpl implements HostGroup {
     /**
      * Host group entity
      */
@@ -880,14 +887,20 @@ public abstract class BaseBlueprintProcessor extends AbstractControllerResourceP
     private Stack stack;
 
     /**
+     * The Blueprint processor associated with this HostGroupImpl instance
+     */
+    private final BaseBlueprintProcessor blueprintProcessor;
+
+    /**
      * Constructor.
      *
      * @param hostGroup  host group
      * @param stack      stack
      */
-    public HostGroupImpl(HostGroupEntity hostGroup, Stack stack) {
+    public HostGroupImpl(HostGroupEntity hostGroup, Stack stack, BaseBlueprintProcessor blueprintProcessor) {
       this.hostGroup = hostGroup;
       this.stack = stack;
+      this.blueprintProcessor = blueprintProcessor;
       parseComponents();
       parseConfigurations();
     }
@@ -1009,14 +1022,14 @@ public abstract class BaseBlueprintProcessor extends AbstractControllerResourceP
           boolean           resolved        = false;
 
           if (dependencyScope.equals("cluster")) {
-            Collection<String> missingDependencyInfo = verifyComponentCardinalityCount(entity, hostGroups,
+            Collection<String> missingDependencyInfo = blueprintProcessor.verifyComponentCardinalityCount(entity, hostGroups,
                 componentName, new Cardinality("1+"), autoDeployInfo, stack, clusterConfig);
             resolved = missingDependencyInfo.isEmpty();
           } else if (dependencyScope.equals("host")) {
             if (components.contains(component) || (autoDeployInfo != null && autoDeployInfo.isEnabled())) {
               resolved = true;
               if (addComponent(componentName)) {
-                addComponentToBlueprint(hostGroup.getBlueprintEntity(), getEntity().getName(), componentName);
+                blueprintProcessor.addComponentToBlueprint(hostGroup.getBlueprintEntity(), getEntity().getName(), componentName);
               }
             }
           }
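
The substance of the fix in this file is turning Stack and HostGroupImpl from inner classes, which implicitly reached back into the enclosing processor for getManagementController() and the blueprint helpers, into static nested classes that receive those collaborators through their constructors. A stripped-down, compilable sketch of that refactoring pattern; the Controller interface and Stack class below are stand-ins, not the real Ambari types:

public class NestedClassSketch {

  /** Stand-in for AmbariManagementController. */
  interface Controller {
    String describe(String stackName);
  }

  /** After the refactoring: no hidden reference to an enclosing processor instance. */
  static class Stack {
    private final String name;
    private final Controller controller;

    Stack(String name, Controller controller) {
      this.name = name;
      this.controller = controller;   // injected, instead of calling an outer-class getter
    }

    String describe() {
      return controller.describe(name);
    }
  }

  public static void main(String[] args) {
    Controller testDouble = new Controller() {
      public String describe(String stackName) {
        return "stack " + stackName;
      }
    };
    System.out.println(new Stack("HDP-2.1", testDouble).describe());
  }
}

Making the dependency explicit is also what lets the new unit tests further down construct Stack and HostGroupImpl directly against mocked controllers.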

http://git-wip-us.apache.org/repos/asf/ambari/blob/6237724d/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
index c7eef57..c246f83 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
@@ -150,7 +150,8 @@ public class BlueprintConfigurationProcessor {
           for (HostGroup group : hostGroups) {
             Collection<String> hosts = group.getHostInfo();
             for (String host : hosts) {
-              if (propValue.contains(host)) {    //todo: need to use regular expression to avoid matching a host which is a superset.  Can this be fixed???
+              //todo: need to use regular expression to avoid matching a host which is a superset.
+              if (propValue.contains(host)) {
                 matchedHost = true;
                 typeProperties.put(propertyName, propValue.replace(
                     host, "%HOSTGROUP::" + group.getName() + "%"));
@@ -697,4 +698,4 @@ public class BlueprintConfigurationProcessor {
     hbaseEnvMap.put("hbase_master_heapsize", new MPropertyUpdater());
     hbaseEnvMap.put("hbase_regionserver_heapsize", new MPropertyUpdater());
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/6237724d/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
index 3fcfcd4..3498ffb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
@@ -303,6 +303,16 @@ public class ClusterResourceProvider extends BaseBlueprintProcessor {
   }
 
 
+  /**
+   * Package-level access for cluster config
+   * @return cluster config map
+   */
+  Map<String, Map<String, String>> getClusterConfigurations() {
+    return mapClusterConfigurations;
+  }
+
+
+
   // ----- utility methods ---------------------------------------------------
 
   /**
@@ -844,7 +854,7 @@ public class ClusterResourceProvider extends BaseBlueprintProcessor {
 
     BlueprintConfigurationProcessor configurationProcessor = new BlueprintConfigurationProcessor(mapClusterConfigurations);
     configurationProcessor.doUpdateForClusterCreate(blueprintHostGroups);
-    setMissingConfigurations();
+    setMissingConfigurations(blueprintHostGroups);
   }
 
   /**
@@ -908,14 +918,29 @@ public class ClusterResourceProvider extends BaseBlueprintProcessor {
   /**
    * Explicitly set any properties that are required but not currently provided in the stack definition.
    */
-  private void setMissingConfigurations() {
+  void setMissingConfigurations(Map<String, HostGroupImpl> blueprintHostGroups) {
     // AMBARI-5206
     final Map<String , String> userProps = new HashMap<String , String>();
-    userProps.put("oozie_user", "oozie-env");
-    userProps.put("hive_user", "hive-env");
-    userProps.put("hcat_user", "hive-env");
-    userProps.put("hbase_user", "hbase-env");
-    userProps.put("falcon_user", "falcon-env");
+
+    // only add user properties to the map for
+    // services actually included in the blueprint definition
+    if (isServiceIncluded("OOZIE", blueprintHostGroups)) {
+      userProps.put("oozie_user", "oozie-env");
+    }
+
+    if (isServiceIncluded("HIVE", blueprintHostGroups)) {
+      userProps.put("hive_user", "hive-env");
+      userProps.put("hcat_user", "hive-env");
+    }
+
+    if (isServiceIncluded("HBASE", blueprintHostGroups)) {
+      userProps.put("hbase_user", "hbase-env");
+    }
+
+    if (isServiceIncluded("FALCON", blueprintHostGroups)) {
+      userProps.put("falcon_user", "falcon-env");
+    }
+
 
     String proxyUserHosts  = "hadoop.proxyuser.%s.hosts";
     String proxyUserGroups = "hadoop.proxyuser.%s.groups";
@@ -923,14 +948,40 @@ public class ClusterResourceProvider extends BaseBlueprintProcessor {
     for (String property : userProps.keySet()) {
       String configType = userProps.get(property);
       Map<String, String> configs = mapClusterConfigurations.get(configType);
-      String user = configs.get(property);
-      if (user != null && !user.isEmpty()) {
-        ensureProperty("core-site", String.format(proxyUserHosts, user), "*");
-        ensureProperty("core-site", String.format(proxyUserGroups, user), "users");
+      if (configs != null) {
+        String user = configs.get(property);
+        if (user != null && !user.isEmpty()) {
+          ensureProperty("core-site", String.format(proxyUserHosts, user), "*");
+          ensureProperty("core-site", String.format(proxyUserGroups, user), "users");
+        }
+      } else {
+        LOG.debug("setMissingConfigurations: no user configuration found for type = " + configType + ".  This may be caused by an error in the blueprint configuration.");
       }
+
     }
   }
 
+
+  /**
+   * Determines if any components in the specified service are
+   *   included in the current blueprint's host group definitions.
+   *
+   * @param serviceName the Hadoop service name to query on
+   * @param blueprintHostGroups the map of Host Groups in the current blueprint
+   * @return true if the named service is included in the blueprint
+   *         false if the named service it not included in the blueprint
+   */
+  protected boolean isServiceIncluded(String serviceName, Map<String, HostGroupImpl> blueprintHostGroups) {
+    for (String hostGroupName : blueprintHostGroups.keySet()) {
+      HostGroupImpl hostGroup = blueprintHostGroups.get(hostGroupName);
+      if (hostGroup.getServices().contains(serviceName)) {
+        return true;
+      }
+    }
+
+    return false;
+  }
+
   /**
    * Ensure that the specified property exists.
    * If not, set a default value.
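
With this change setMissingConfigurations() only injects the hadoop.proxyuser.* defaults for services whose components actually appear in a host group, and it tolerates a missing *-env map instead of dereferencing null. A self-contained sketch of that guard logic over plain maps; the method and class names are hypothetical, not the provider itself:

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class ProxyUserDefaultsSketch {

  /** Adds proxy-user defaults to core-site only for services present in the blueprint. */
  static void setMissingConfigurations(Map<String, Map<String, String>> clusterConfigs,
                                       Set<String> blueprintServices) {
    Map<String, String> userProps = new HashMap<String, String>();
    if (blueprintServices.contains("OOZIE")) {
      userProps.put("oozie_user", "oozie-env");
    }
    if (blueprintServices.contains("FALCON")) {
      userProps.put("falcon_user", "falcon-env");
    }

    Map<String, String> coreSite = clusterConfigs.get("core-site");
    for (Map.Entry<String, String> entry : userProps.entrySet()) {
      Map<String, String> envConfig = clusterConfigs.get(entry.getValue());
      if (envConfig == null) {          // the null guard the patch adds
        continue;
      }
      String user = envConfig.get(entry.getKey());
      if (user != null && !user.isEmpty()) {
        String hostsKey = String.format("hadoop.proxyuser.%s.hosts", user);
        String groupsKey = String.format("hadoop.proxyuser.%s.groups", user);
        if (!coreSite.containsKey(hostsKey)) {
          coreSite.put(hostsKey, "*");
        }
        if (!coreSite.containsKey(groupsKey)) {
          coreSite.put(groupsKey, "users");
        }
      }
    }
  }

  public static void main(String[] args) {
    Map<String, Map<String, String>> configs = new HashMap<String, Map<String, String>>();
    configs.put("core-site", new HashMap<String, String>());
    configs.put("oozie-env", new HashMap<String, String>());
    configs.get("oozie-env").put("oozie_user", "oozie");

    Set<String> services = new HashSet<String>();
    services.add("OOZIE");                       // FALCON intentionally left out

    setMissingConfigurations(configs, services);
    System.out.println(configs.get("core-site"));
    // prints the two hadoop.proxyuser.oozie.* defaults and nothing for falcon
  }
}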

http://git-wip-us.apache.org/repos/asf/ambari/blob/6237724d/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
index b5c50ff..06d8bb7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
@@ -25,6 +25,7 @@ import static org.easymock.EasyMock.createNiceMock;
 import static org.easymock.EasyMock.createStrictMock;
 import static org.easymock.EasyMock.eq;
 import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.isA;
 import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.verify;
 import static org.junit.Assert.assertEquals;
@@ -71,6 +72,7 @@ import org.apache.ambari.server.orm.entities.BlueprintEntity;
 import org.apache.ambari.server.orm.entities.HostGroupComponentEntity;
 import org.apache.ambari.server.orm.entities.HostGroupConfigEntity;
 import org.apache.ambari.server.orm.entities.HostGroupEntity;
+import org.apache.ambari.server.state.AutoDeployInfo;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.DependencyInfo;
@@ -78,7 +80,9 @@ import org.apache.ambari.server.state.PropertyInfo;
 import org.apache.ambari.server.state.State;
 import org.easymock.Capture;
 import org.easymock.EasyMock;
+import org.easymock.EasyMockSupport;
 import org.junit.Assert;
+import org.junit.Before;
 import org.junit.Test;
 
 import com.google.gson.Gson;
@@ -87,6 +91,14 @@ import com.google.gson.Gson;
  * ClusterResourceProvider tests.
  */
 public class ClusterResourceProviderTest {
+
+  @Before
+  public void setup() throws Exception {
+    // reset this static field, to allow unit tests to function independently
+    BaseBlueprintProcessor.stackInfo = null;
+  }
+
+
   @Test
   public void testCreateResources() throws Exception{
     Resource.Type type = Resource.Type.Cluster;
@@ -440,7 +452,12 @@ public class ClusterResourceProviderTest {
     PersistKeyValueService.init(persistKeyValue);
     ResourceProvider provider = new TestClusterResourceProvider(
         managementController, serviceResourceProvider, componentResourceProvider,
-        hostResourceProvider, hostComponentResourceProvider, configGroupResourceProvider);
+        hostResourceProvider, hostComponentResourceProvider, configGroupResourceProvider) {
+      @Override
+      protected boolean isServiceIncluded(String serviceName, Map<String, HostGroupImpl> blueprintHostGroups) {
+        return true;
+      }
+    };
 
     RequestStatus requestStatus = provider.createResources(request);
 
@@ -1644,7 +1661,12 @@ public class ClusterResourceProviderTest {
     PersistKeyValueService.init(persistKeyValue);
     ResourceProvider provider = new TestClusterResourceProvider(
         managementController, serviceResourceProvider, componentResourceProvider,
-        hostResourceProvider, hostComponentResourceProvider, configGroupResourceProvider);
+        hostResourceProvider, hostComponentResourceProvider, configGroupResourceProvider) {
+      @Override
+      protected boolean isServiceIncluded(String serviceName, Map<String, HostGroupImpl> blueprintHostGroups) {
+        return true;
+      }
+    };
 
     RequestStatus requestStatus = provider.createResources(request);
 
@@ -2336,7 +2358,12 @@ public class ClusterResourceProviderTest {
     PersistKeyValueService.init(persistKeyValue);
     ResourceProvider provider = new TestClusterResourceProvider(
         managementController, serviceResourceProvider, componentResourceProvider,
-        hostResourceProvider, hostComponentResourceProvider, configGroupResourceProvider);
+        hostResourceProvider, hostComponentResourceProvider, configGroupResourceProvider) {
+      @Override
+      protected boolean isServiceIncluded(String serviceName, Map<String, HostGroupImpl> blueprintHostGroups) {
+        return true;
+      }
+    };
 
     RequestStatus requestStatus = provider.createResources(request);
 
@@ -2826,6 +2853,470 @@ public class ClusterResourceProviderTest {
     verify(managementController, response, clusters);
   }
 
+  @Test
+  public void testSetMissingConfigurationsOozieIncluded() throws Exception {
+    EasyMockSupport mockSupport = new EasyMockSupport();
+
+    AmbariManagementController mockMgmtController =
+      mockSupport.createMock(AmbariManagementController.class);
+    ResourceProvider mockServiceProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockComponentProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockHostProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockHostComponentProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockConfigGroupProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    AmbariManagementController mockManagementController =
+      mockSupport.createMock(AmbariManagementController.class);
+    StackServiceResponse mockStackServiceResponseOne =
+      mockSupport.createMock(StackServiceResponse.class);
+    StackServiceComponentResponse mockStackComponentResponse =
+      mockSupport.createMock(StackServiceComponentResponse.class);
+    AmbariMetaInfo mockAmbariMetaInfo =
+      mockSupport.createMock(AmbariMetaInfo.class);
+
+    expect(mockStackComponentResponse.getComponentName()).andReturn("OOZIE_SERVER");
+    expect(mockStackComponentResponse.getCardinality()).andReturn("1");
+    expect(mockStackComponentResponse.getAutoDeploy()).andReturn(new AutoDeployInfo());
+
+
+    expect(mockStackServiceResponseOne.getServiceName()).andReturn("OOZIE");
+    expect(mockManagementController.getStackServices(isA(Set.class))).andReturn(Collections.singleton(mockStackServiceResponseOne));
+    expect(mockManagementController.getStackComponents(isA(Set.class))).andReturn(Collections.singleton(mockStackComponentResponse));
+    expect(mockManagementController.getStackConfigurations(isA(Set.class))).andReturn(Collections.<StackConfigurationResponse>emptySet());
+
+    expect(mockAmbariMetaInfo.getComponentDependencies("HDP", "2.1", "OOZIE", "OOZIE_SERVER")).andReturn(Collections.<DependencyInfo>emptyList());
+
+    mockSupport.replayAll();
+
+
+    ClusterResourceProvider.init(null, mockAmbariMetaInfo, null);
+
+    BaseBlueprintProcessor.Stack stack =
+      new BaseBlueprintProcessor.Stack("HDP", "2.1", mockManagementController);
+
+    ClusterResourceProvider clusterResourceProvider =
+      new TestClusterResourceProvider(mockMgmtController, mockServiceProvider,
+        mockComponentProvider, mockHostProvider, mockHostComponentProvider, mockConfigGroupProvider);
+
+
+    HostGroupEntity hostGroup = new HostGroupEntity();
+    hostGroup.setComponents(Collections.<HostGroupComponentEntity>emptyList());
+    HostGroupConfigEntity configEntity = new HostGroupConfigEntity();
+    configEntity.setConfigData("");
+
+    hostGroup.setConfigurations(Collections.singletonList(configEntity));
+    BaseBlueprintProcessor.HostGroupImpl hostGroupImpl =
+      new BaseBlueprintProcessor.HostGroupImpl(hostGroup, stack, null);
+    hostGroupImpl.addComponent("OOZIE_SERVER");
+
+    // add empty map for core-site, to simulate this configuration entry
+    clusterResourceProvider.getClusterConfigurations().put("core-site", new HashMap<String, String>());
+    clusterResourceProvider.getClusterConfigurations().put("oozie-env", new HashMap<String, String>());
+    clusterResourceProvider.getClusterConfigurations().get("oozie-env").put("oozie_user", "oozie");
+
+    clusterResourceProvider.setMissingConfigurations(Collections.singletonMap("host_group_one", hostGroupImpl));
+
+    Map<String, String> mapCoreSiteConfig =
+      clusterResourceProvider.getClusterConfigurations().get("core-site");
+
+    assertNotNull("core-site map was null.", mapCoreSiteConfig);
+    assertEquals("Incorrect number of entries in the core-site config map",
+                 2, mapCoreSiteConfig.size());
+    assertEquals("Incorrect value for proxy hosts",
+                 "*", mapCoreSiteConfig.get("hadoop.proxyuser.oozie.hosts"));
+    assertEquals("Incorrect value for proxy hosts",
+      "users", mapCoreSiteConfig.get("hadoop.proxyuser.oozie.groups"));
+
+    mockSupport.verifyAll();
+  }
+
+
+  @Test
+  public void testSetMissingConfigurationsFalconIncluded() throws Exception {
+    EasyMockSupport mockSupport = new EasyMockSupport();
+
+    AmbariManagementController mockMgmtController =
+      mockSupport.createMock(AmbariManagementController.class);
+    ResourceProvider mockServiceProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockComponentProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockHostProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockHostComponentProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockConfigGroupProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    AmbariManagementController mockManagementController =
+      mockSupport.createMock(AmbariManagementController.class);
+    StackServiceResponse mockStackServiceResponseOne =
+      mockSupport.createMock(StackServiceResponse.class);
+    StackServiceComponentResponse mockStackComponentResponse =
+      mockSupport.createMock(StackServiceComponentResponse.class);
+    AmbariMetaInfo mockAmbariMetaInfo =
+      mockSupport.createMock(AmbariMetaInfo.class);
+
+    expect(mockStackComponentResponse.getComponentName()).andReturn("FALCON_SERVER");
+    expect(mockStackComponentResponse.getCardinality()).andReturn("1");
+    expect(mockStackComponentResponse.getAutoDeploy()).andReturn(new AutoDeployInfo());
+
+
+    expect(mockStackServiceResponseOne.getServiceName()).andReturn("FALCON");
+    expect(mockManagementController.getStackServices(isA(Set.class))).andReturn(Collections.singleton(mockStackServiceResponseOne));
+    expect(mockManagementController.getStackComponents(isA(Set.class))).andReturn(Collections.singleton(mockStackComponentResponse));
+    expect(mockManagementController.getStackConfigurations(isA(Set.class))).andReturn(Collections.<StackConfigurationResponse>emptySet());
+
+    expect(mockAmbariMetaInfo.getComponentDependencies("HDP", "2.1", "FALCON", "FALCON_SERVER")).andReturn(Collections.<DependencyInfo>emptyList());
+
+    mockSupport.replayAll();
+
+    ClusterResourceProvider.init(null, mockAmbariMetaInfo, null);
+
+    BaseBlueprintProcessor.Stack stack =
+      new BaseBlueprintProcessor.Stack("HDP", "2.1", mockManagementController);
+
+    ClusterResourceProvider clusterResourceProvider =
+      new TestClusterResourceProvider(mockMgmtController, mockServiceProvider,
+        mockComponentProvider, mockHostProvider, mockHostComponentProvider, mockConfigGroupProvider);
+
+    HostGroupEntity hostGroup = new HostGroupEntity();
+    hostGroup.setComponents(Collections.<HostGroupComponentEntity>emptyList());
+    HostGroupConfigEntity configEntity = new HostGroupConfigEntity();
+    configEntity.setConfigData("");
+
+    hostGroup.setConfigurations(Collections.singletonList(configEntity));
+    BaseBlueprintProcessor.HostGroupImpl hostGroupImpl =
+      new BaseBlueprintProcessor.HostGroupImpl(hostGroup, stack, null);
+    hostGroupImpl.addComponent("FALCON_SERVER");
+
+    // add empty map for core-site, to simulate this configuration entry
+    clusterResourceProvider.getClusterConfigurations().put("core-site", new HashMap<String, String>());
+    clusterResourceProvider.getClusterConfigurations().put("falcon-env", new HashMap<String, String>());
+    clusterResourceProvider.getClusterConfigurations().get("falcon-env").put("falcon_user", "falcon");
+
+    clusterResourceProvider.setMissingConfigurations(Collections.singletonMap("host_group_one", hostGroupImpl));
+
+    Map<String, String> mapCoreSiteConfig =
+      clusterResourceProvider.getClusterConfigurations().get("core-site");
+
+    assertNotNull("core-site map was null.", mapCoreSiteConfig);
+    assertEquals("Incorrect number of entries in the core-site config map",
+      2, mapCoreSiteConfig.size());
+    assertEquals("Incorrect value for proxy hosts",
+      "*", mapCoreSiteConfig.get("hadoop.proxyuser.falcon.hosts"));
+    assertEquals("Incorrect value for proxy hosts",
+      "users", mapCoreSiteConfig.get("hadoop.proxyuser.falcon.groups"));
+
+    mockSupport.verifyAll();
+  }
+
+
+  @Test
+  public void testSetMissingConfigurationsOozieNotIncluded() throws Exception {
+    EasyMockSupport mockSupport = new EasyMockSupport();
+
+    AmbariManagementController mockMgmtController =
+      mockSupport.createMock(AmbariManagementController.class);
+    ResourceProvider mockServiceProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockComponentProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockHostProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockHostComponentProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockConfigGroupProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    AmbariManagementController mockManagementController =
+      mockSupport.createMock(AmbariManagementController.class);
+    StackServiceResponse mockStackServiceResponseOne =
+      mockSupport.createMock(StackServiceResponse.class);
+    StackServiceComponentResponse mockStackComponentResponse =
+      mockSupport.createMock(StackServiceComponentResponse.class);
+    AmbariMetaInfo mockAmbariMetaInfo =
+      mockSupport.createMock(AmbariMetaInfo.class);
+
+    expect(mockStackComponentResponse.getComponentName()).andReturn("OOZIE_SERVER");
+    expect(mockStackComponentResponse.getCardinality()).andReturn("1");
+    expect(mockStackComponentResponse.getAutoDeploy()).andReturn(new AutoDeployInfo());
+
+
+    expect(mockStackServiceResponseOne.getServiceName()).andReturn("OOZIE");
+    expect(mockManagementController.getStackServices(isA(Set.class))).andReturn(Collections.singleton(mockStackServiceResponseOne));
+    expect(mockManagementController.getStackComponents(isA(Set.class))).andReturn(Collections.singleton(mockStackComponentResponse));
+    expect(mockManagementController.getStackConfigurations(isA(Set.class))).andReturn(Collections.<StackConfigurationResponse>emptySet());
+
+    expect(mockAmbariMetaInfo.getComponentDependencies("HDP", "2.1", "OOZIE", "OOZIE_SERVER")).andReturn(Collections.<DependencyInfo>emptyList());
+
+    mockSupport.replayAll();
+
+    ClusterResourceProvider.init(null, mockAmbariMetaInfo, null);
+
+    BaseBlueprintProcessor.Stack stack =
+      new BaseBlueprintProcessor.Stack("HDP", "2.1", mockManagementController);
+
+    ClusterResourceProvider clusterResourceProvider =
+      new TestClusterResourceProvider(mockMgmtController, mockServiceProvider,
+        mockComponentProvider, mockHostProvider, mockHostComponentProvider, mockConfigGroupProvider);
+
+
+    HostGroupEntity hostGroup = new HostGroupEntity();
+    hostGroup.setComponents(Collections.<HostGroupComponentEntity>emptyList());
+    HostGroupConfigEntity configEntity = new HostGroupConfigEntity();
+    configEntity.setConfigData("");
+
+    hostGroup.setConfigurations(Collections.singletonList(configEntity));
+    BaseBlueprintProcessor.HostGroupImpl hostGroupImpl =
+      new BaseBlueprintProcessor.HostGroupImpl(hostGroup, stack, null);
+    hostGroupImpl.addComponent("COMPONENT_ONE");
+
+    // add empty map for core-site, to simulate this configuration entry
+    clusterResourceProvider.getClusterConfigurations().put("core-site", new HashMap<String, String>());
+
+    clusterResourceProvider.setMissingConfigurations(Collections.singletonMap("host_group_one", hostGroupImpl));
+
+    Map<String, String> mapCoreSiteConfig =
+      clusterResourceProvider.getClusterConfigurations().get("core-site");
+
+    assertNotNull("core-site map was null.", mapCoreSiteConfig);
+    assertEquals("Incorrect number of entries in the core-site config map",
+                0, mapCoreSiteConfig.size());
+
+    mockSupport.verifyAll();
+
+  }
+
+
+  @Test
+  public void testSetMissingConfigurationsFalconNotIncluded() throws Exception {
+    EasyMockSupport mockSupport = new EasyMockSupport();
+
+    AmbariManagementController mockMgmtController =
+      mockSupport.createMock(AmbariManagementController.class);
+    ResourceProvider mockServiceProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockComponentProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockHostProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockHostComponentProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockConfigGroupProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    AmbariManagementController mockManagementController =
+      mockSupport.createMock(AmbariManagementController.class);
+    StackServiceResponse mockStackServiceResponseOne =
+      mockSupport.createMock(StackServiceResponse.class);
+    StackServiceComponentResponse mockStackComponentResponse =
+      mockSupport.createMock(StackServiceComponentResponse.class);
+    AmbariMetaInfo mockAmbariMetaInfo =
+      mockSupport.createMock(AmbariMetaInfo.class);
+
+    expect(mockStackComponentResponse.getComponentName()).andReturn("FALCON_SERVER");
+    expect(mockStackComponentResponse.getCardinality()).andReturn("1");
+    expect(mockStackComponentResponse.getAutoDeploy()).andReturn(new AutoDeployInfo());
+
+
+    expect(mockStackServiceResponseOne.getServiceName()).andReturn("FALCON");
+    expect(mockManagementController.getStackServices(isA(Set.class))).andReturn(Collections.singleton(mockStackServiceResponseOne));
+    expect(mockManagementController.getStackComponents(isA(Set.class))).andReturn(Collections.singleton(mockStackComponentResponse));
+    expect(mockManagementController.getStackConfigurations(isA(Set.class))).andReturn(Collections.<StackConfigurationResponse>emptySet());
+
+    expect(mockAmbariMetaInfo.getComponentDependencies("HDP", "2.1", "FALCON", "FALCON_SERVER")).andReturn(Collections.<DependencyInfo>emptyList());
+
+    mockSupport.replayAll();
+
+    ClusterResourceProvider.init(null, mockAmbariMetaInfo, null);
+
+    BaseBlueprintProcessor.Stack stack =
+      new BaseBlueprintProcessor.Stack("HDP", "2.1", mockManagementController);
+
+    ClusterResourceProvider clusterResourceProvider =
+      new TestClusterResourceProvider(mockMgmtController, mockServiceProvider,
+        mockComponentProvider, mockHostProvider, mockHostComponentProvider, mockConfigGroupProvider);
+
+    HostGroupEntity hostGroup = new HostGroupEntity();
+    hostGroup.setComponents(Collections.<HostGroupComponentEntity>emptyList());
+    HostGroupConfigEntity configEntity = new HostGroupConfigEntity();
+    configEntity.setConfigData("");
+
+    hostGroup.setConfigurations(Collections.singletonList(configEntity));
+    BaseBlueprintProcessor.HostGroupImpl hostGroupImpl =
+      new BaseBlueprintProcessor.HostGroupImpl(hostGroup, stack, null);
+    // blueprint request will not include a reference to FALCON_SERVER
+    hostGroupImpl.addComponent("COMPONENT_ONE");
+
+    // add empty map for core-site, to simulate this configuration entry
+    clusterResourceProvider.getClusterConfigurations().put("core-site", new HashMap<String, String>());
+
+    clusterResourceProvider.setMissingConfigurations(Collections.singletonMap("host_group_one", hostGroupImpl));
+
+    Map<String, String> mapCoreSiteConfig =
+      clusterResourceProvider.getClusterConfigurations().get("core-site");
+
+    assertNotNull("core-site map was null.", mapCoreSiteConfig);
+    assertEquals("Incorrect number of entries in the core-site config map",
+      0, mapCoreSiteConfig.size());
+
+    mockSupport.verifyAll();
+
+  }
+
+
+  @Test
+  public void testSetMissingConfigurationsHiveNotIncluded() throws Exception {
+    EasyMockSupport mockSupport = new EasyMockSupport();
+
+    AmbariManagementController mockMgmtController =
+      mockSupport.createMock(AmbariManagementController.class);
+    ResourceProvider mockServiceProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockComponentProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockHostProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockHostComponentProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockConfigGroupProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    AmbariManagementController mockManagementController =
+      mockSupport.createMock(AmbariManagementController.class);
+    StackServiceResponse mockStackServiceResponseOne =
+      mockSupport.createMock(StackServiceResponse.class);
+    StackServiceComponentResponse mockStackComponentResponse =
+      mockSupport.createMock(StackServiceComponentResponse.class);
+    AmbariMetaInfo mockAmbariMetaInfo =
+      mockSupport.createMock(AmbariMetaInfo.class);
+
+    expect(mockStackComponentResponse.getComponentName()).andReturn("HIVE_SERVER");
+    expect(mockStackComponentResponse.getCardinality()).andReturn("1");
+    expect(mockStackComponentResponse.getAutoDeploy()).andReturn(new AutoDeployInfo());
+
+    expect(mockStackServiceResponseOne.getServiceName()).andReturn("HIVE");
+    expect(mockManagementController.getStackServices(isA(Set.class))).andReturn(Collections.singleton(mockStackServiceResponseOne));
+    expect(mockManagementController.getStackComponents(isA(Set.class))).andReturn(Collections.singleton(mockStackComponentResponse));
+    expect(mockManagementController.getStackConfigurations(isA(Set.class))).andReturn(Collections.<StackConfigurationResponse>emptySet());
+
+    expect(mockAmbariMetaInfo.getComponentDependencies("HDP", "2.1", "HIVE", "HIVE_SERVER")).andReturn(Collections.<DependencyInfo>emptyList());
+
+    mockSupport.replayAll();
+
+    ClusterResourceProvider.init(null, mockAmbariMetaInfo, null);
+
+    BaseBlueprintProcessor.Stack stack =
+      new BaseBlueprintProcessor.Stack("HDP", "2.1", mockManagementController);
+
+    ClusterResourceProvider clusterResourceProvider =
+      new TestClusterResourceProvider(mockMgmtController, mockServiceProvider,
+        mockComponentProvider, mockHostProvider, mockHostComponentProvider, mockConfigGroupProvider);
+
+    HostGroupEntity hostGroup = new HostGroupEntity();
+    hostGroup.setComponents(Collections.<HostGroupComponentEntity>emptyList());
+    HostGroupConfigEntity configEntity = new HostGroupConfigEntity();
+    configEntity.setConfigData("");
+
+    hostGroup.setConfigurations(Collections.singletonList(configEntity));
+    BaseBlueprintProcessor.HostGroupImpl hostGroupImpl =
+      new BaseBlueprintProcessor.HostGroupImpl(hostGroup, stack, null);
+    // blueprint request will not include a reference to a HIVE component
+    hostGroupImpl.addComponent("COMPONENT_ONE");
+
+    // add empty map for core-site, to simulate this configuration entry
+    clusterResourceProvider.getClusterConfigurations().put("core-site", new HashMap<String, String>());
+
+    clusterResourceProvider.setMissingConfigurations(Collections.singletonMap("host_group_one", hostGroupImpl));
+
+    Map<String, String> mapCoreSiteConfig =
+      clusterResourceProvider.getClusterConfigurations().get("core-site");
+
+    assertNotNull("core-site map was null.", mapCoreSiteConfig);
+    assertEquals("Incorrect number of entries in the core-site config map",
+      0, mapCoreSiteConfig.size());
+
+    mockSupport.verifyAll();
+
+  }
+
+
+  @Test
+  public void testSetMissingConfigurationsHBaseNotIncluded() throws Exception {
+    EasyMockSupport mockSupport = new EasyMockSupport();
+
+    AmbariManagementController mockMgmtController =
+      mockSupport.createMock(AmbariManagementController.class);
+    ResourceProvider mockServiceProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockComponentProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockHostProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockHostComponentProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    ResourceProvider mockConfigGroupProvider =
+      mockSupport.createMock(ResourceProvider.class);
+    AmbariManagementController mockManagementController =
+      mockSupport.createMock(AmbariManagementController.class);
+    StackServiceResponse mockStackServiceResponseOne =
+      mockSupport.createMock(StackServiceResponse.class);
+    StackServiceComponentResponse mockStackComponentResponse =
+      mockSupport.createMock(StackServiceComponentResponse.class);
+    AmbariMetaInfo mockAmbariMetaInfo =
+      mockSupport.createMock(AmbariMetaInfo.class);
+
+    expect(mockStackComponentResponse.getComponentName()).andReturn("HBASE_SERVER");
+    expect(mockStackComponentResponse.getCardinality()).andReturn("1");
+    expect(mockStackComponentResponse.getAutoDeploy()).andReturn(new AutoDeployInfo());
+
+    expect(mockStackServiceResponseOne.getServiceName()).andReturn("HBASE");
+    expect(mockManagementController.getStackServices(isA(Set.class))).andReturn(Collections.singleton(mockStackServiceResponseOne));
+    expect(mockManagementController.getStackComponents(isA(Set.class))).andReturn(Collections.singleton(mockStackComponentResponse));
+    expect(mockManagementController.getStackConfigurations(isA(Set.class))).andReturn(Collections.<StackConfigurationResponse>emptySet());
+
+    expect(mockAmbariMetaInfo.getComponentDependencies("HDP", "2.1", "HBASE", "HBASE_SERVER")).andReturn(Collections.<DependencyInfo>emptyList());
+
+    mockSupport.replayAll();
+
+    ClusterResourceProvider.init(null, mockAmbariMetaInfo, null);
+
+    BaseBlueprintProcessor.Stack stack =
+      new BaseBlueprintProcessor.Stack("HDP", "2.1", mockManagementController);
+
+    ClusterResourceProvider clusterResourceProvider =
+      new TestClusterResourceProvider(mockMgmtController, mockServiceProvider,
+        mockComponentProvider, mockHostProvider, mockHostComponentProvider, mockConfigGroupProvider);
+
+    HostGroupEntity hostGroup = new HostGroupEntity();
+    hostGroup.setComponents(Collections.<HostGroupComponentEntity>emptyList());
+    HostGroupConfigEntity configEntity = new HostGroupConfigEntity();
+    configEntity.setConfigData("");
+
+    hostGroup.setConfigurations(Collections.singletonList(configEntity));
+    BaseBlueprintProcessor.HostGroupImpl hostGroupImpl =
+      new BaseBlueprintProcessor.HostGroupImpl(hostGroup, stack, null);
+    // blueprint request will not include a reference to an HBASE component
+    hostGroupImpl.addComponent("COMPONENT_ONE");
+
+    // add empty map for core-site, to simulate this configuration entry
+    clusterResourceProvider.getClusterConfigurations().put("core-site", new HashMap<String, String>());
+
+    clusterResourceProvider.setMissingConfigurations(Collections.singletonMap("host_group_one", hostGroupImpl));
+
+    Map<String, String> mapCoreSiteConfig =
+      clusterResourceProvider.getClusterConfigurations().get("core-site");
+
+    assertNotNull("core-site map was null.", mapCoreSiteConfig);
+    assertEquals("Incorrect number of entries in the core-site config map",
+      0, mapCoreSiteConfig.size());
+
+    mockSupport.verifyAll();
+
+  }
+
+
   private class TestClusterResourceProvider extends ClusterResourceProvider {
 
     private ResourceProvider serviceResourceProvider;
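
The new tests above lean on EasyMockSupport so that a whole group of mocks can be switched to replay mode and verified together. A tiny compilable sketch of that pattern, assuming EasyMock is on the classpath; the Service interface is hypothetical:

import static org.easymock.EasyMock.expect;

import org.easymock.EasyMockSupport;

public class EasyMockPatternSketch {

  /** Hypothetical collaborator to be mocked. */
  interface Service {
    String lookup(String key);
  }

  public static void main(String[] args) {
    EasyMockSupport support = new EasyMockSupport();
    Service service = support.createMock(Service.class);

    expect(service.lookup("oozie_user")).andReturn("oozie");
    support.replayAll();               // switch every mock created via support to replay mode

    String user = service.lookup("oozie_user");
    support.verifyAll();               // fails if any recorded expectation was not exercised
    System.out.println(user);
  }
}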


[07/35] git commit: AMBARI-7009. BE: Configs recommendation API generates incorrect host-groups

Posted by jo...@apache.org.
AMBARI-7009. BE: Configs recommendation API generates incorrect host-groups


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/336a63da
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/336a63da
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/336a63da

Branch: refs/heads/branch-alerts-dev
Commit: 336a63dac9436d35532135e1e719fa3f8b6ac12f
Parents: 98660b9
Author: Srimanth Gunturi <sg...@hortonworks.com>
Authored: Mon Aug 25 17:18:01 2014 -0700
Committer: Srimanth Gunturi <sg...@hortonworks.com>
Committed: Mon Aug 25 17:41:24 2014 -0700

----------------------------------------------------------------------
 .../GetConfigurationRecommnedationCommand.java  |   5 +-
 ...tConfigurationRecommnedationCommandTest.java | 103 +++++++++++++++++++
 2 files changed, 105 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/336a63da/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetConfigurationRecommnedationCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetConfigurationRecommnedationCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetConfigurationRecommnedationCommand.java
index 52df3d0..b20c966 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetConfigurationRecommnedationCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetConfigurationRecommnedationCommand.java
@@ -63,10 +63,9 @@ public class GetConfigurationRecommnedationCommand extends
     return response;
   }
 
-  private Set<HostGroup> processHostGroups(StackAdvisorRequest request) {
-
+  protected Set<HostGroup> processHostGroups(StackAdvisorRequest request) {
     Set<HostGroup> resultSet = new HashSet<HostGroup>();
-    for (Map.Entry<String, Set<String>> componentHost : request.getComponentHostsMap().entrySet()) {
+    for (Map.Entry<String, Set<String>> componentHost : request.getHostComponents().entrySet()) {
       String hostGroupName = componentHost.getKey();
       Set<String> components = componentHost.getValue();
       if (hostGroupName != null && components != null) {
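
The one-line change above switches the iteration source to getHostComponents(), whose keys (as exercised by the test below) are the blueprint host-group names, so each generated HostGroup is named after its group rather than after a component. A minimal sketch of that reshaping over plain collections; the class is hypothetical and does not use the stack-advisor types:

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class HostGroupMappingSketch {

  /** Builds {group name -> set of {"name": component}} entries, mirroring the corrected processHostGroups(). */
  static Map<String, Set<Map<String, String>>> toHostGroups(Map<String, Set<String>> hostComponents) {
    Map<String, Set<Map<String, String>>> result = new HashMap<String, Set<Map<String, String>>>();
    for (Map.Entry<String, Set<String>> entry : hostComponents.entrySet()) {
      Set<Map<String, String>> components = new HashSet<Map<String, String>>();
      for (String componentName : entry.getValue()) {
        components.add(Collections.singletonMap("name", componentName));
      }
      result.put(entry.getKey(), components);   // keyed by host-group name, not by component
    }
    return result;
  }

  public static void main(String[] args) {
    Map<String, Set<String>> input = new HashMap<String, Set<String>>();
    input.put("group1", new HashSet<String>(Arrays.asList("NAMENODE", "ZOOKEEPER_SERVER")));
    System.out.println(toHostGroups(input));
  }
}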

http://git-wip-us.apache.org/repos/asf/ambari/blob/336a63da/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetConfigurationRecommnedationCommandTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetConfigurationRecommnedationCommandTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetConfigurationRecommnedationCommandTest.java
new file mode 100644
index 0000000..1bd66a4
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/GetConfigurationRecommnedationCommandTest.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services.stackadvisor.commands;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest;
+import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRunner;
+import org.apache.ambari.server.api.services.stackadvisor.recommendations.RecommendationResponse;
+import org.junit.Test;
+
+public class GetConfigurationRecommnedationCommandTest {
+
+  @Test
+  public void testProcessHostGroups() throws Exception {
+    StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
+    File file = mock(File.class);
+    AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
+    GetConfigurationRecommnedationCommand command = new GetConfigurationRecommnedationCommand(file, "script", 1, saRunner, metaInfo);
+
+    StackAdvisorRequest request = mock(StackAdvisorRequest.class);
+    Map<String, Set<String>> componentHostGroupMap = new HashMap<String, Set<String>>();
+    Set<String> components1 = new HashSet<String>();
+    components1.add("component1");
+    components1.add("component4");
+    components1.add("component5");
+    componentHostGroupMap.put("group1", components1);
+    Set<String> components2 = new HashSet<String>();
+    components2.add("component2");
+    components2.add("component3");
+    componentHostGroupMap.put("group2", components2);
+    doReturn(componentHostGroupMap).when(request).getHostComponents();
+    Set<RecommendationResponse.HostGroup> hostGroups = command.processHostGroups(request);
+
+    assertNotNull(hostGroups);
+    assertEquals(2, hostGroups.size());
+    Map<String, RecommendationResponse.HostGroup> hostGroupMap =
+        new HashMap<String, RecommendationResponse.HostGroup>();
+    for (RecommendationResponse.HostGroup hostGroup : hostGroups) {
+      hostGroupMap.put(hostGroup.getName(), hostGroup);
+    }
+    RecommendationResponse.HostGroup hostGroup1 = hostGroupMap.get("group1");
+    assertNotNull(hostGroup1);
+    Set<Map<String, String>> host1Components = hostGroup1.getComponents();
+    assertNotNull(host1Components);
+    assertEquals(3, host1Components.size());
+    Set<String> componentNames1 = new HashSet<String>();
+    for (Map<String, String> host1Component : host1Components) {
+      assertNotNull(host1Component);
+      assertEquals(1, host1Component.size());
+      String componentName = host1Component.get("name");
+      assertNotNull(componentName);
+      componentNames1.add(componentName);
+    }
+    assertEquals(3, componentNames1.size());
+    assertTrue(componentNames1.contains("component1"));
+    assertTrue(componentNames1.contains("component4"));
+    assertTrue(componentNames1.contains("component5"));
+    RecommendationResponse.HostGroup hostGroup2 = hostGroupMap.get("group2");
+    assertNotNull(hostGroup2);
+    Set<Map<String, String>> host2Components = hostGroup2.getComponents();
+    assertNotNull(host2Components);
+    assertEquals(2, host2Components.size());
+    Set<String> componentNames2 = new HashSet<String>();
+    for (Map<String, String> host2Component : host2Components) {
+      assertNotNull(host2Component);
+      assertEquals(1, host2Component.size());
+      String componentName = host2Component.get("name");
+      assertNotNull(componentName);
+      componentNames2.add(componentName);
+    }
+    assertEquals(2, componentNames2.size());
+    assertTrue(componentNames2.contains("component2"));
+    assertTrue(componentNames2.contains("component3"));
+  }
+}
\ No newline at end of file


[08/35] git commit: AMBARI-7010. Recommendation and validation hrefs contain null resource number

Posted by jo...@apache.org.
AMBARI-7010. Recommendation and validation hrefs contain null resource number


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a80066ae
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a80066ae
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a80066ae

Branch: refs/heads/branch-alerts-dev
Commit: a80066ae09dc3ccda574b014028a87136573a4c2
Parents: 336a63d
Author: Srimanth Gunturi <sg...@hortonworks.com>
Authored: Mon Aug 25 17:25:15 2014 -0700
Committer: Srimanth Gunturi <sg...@hortonworks.com>
Committed: Mon Aug 25 17:41:30 2014 -0700

----------------------------------------------------------------------
 .../stackadvisor/StackAdvisorResponse.java      | 73 ++++++++++++++++++++
 .../commands/StackAdvisorCommand.java           | 10 ++-
 .../recommendations/RecommendationResponse.java | 38 +---------
 .../validations/ValidationResponse.java         | 15 +---
 .../RecommendationResourceProvider.java         |  4 +-
 .../internal/ValidationResourceProvider.java    |  4 +-
 .../commands/StackAdvisorCommandTest.java       |  6 +-
 .../RecommendationResourceProviderTest.java     | 31 +++++++++
 .../ValidationResourceProviderTest.java         | 70 +++++++++++++++++++
 9 files changed, 194 insertions(+), 57 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a80066ae/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorResponse.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorResponse.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorResponse.java
new file mode 100644
index 0000000..b6d7de7
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorResponse.java
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services.stackadvisor;
+
+import org.codehaus.jackson.annotate.JsonProperty;
+
+/**
+ * Abstract stack advisor response POJO.
+ */
+public abstract class StackAdvisorResponse {
+
+  private int id;
+
+  @JsonProperty("Versions")
+  private Version version;
+
+  public int getId() {
+    return id;
+  }
+
+  public void setId(int id) {
+    this.id = id;
+  }
+
+  public Version getVersion() {
+    return version;
+  }
+
+  public void setVersion(Version version) {
+    this.version = version;
+  }
+
+  public static class Version {
+    @JsonProperty("stack_name")
+    private String stackName;
+
+    @JsonProperty("stack_version")
+    private String stackVersion;
+
+    public String getStackName() {
+      return stackName;
+    }
+
+    public void setStackName(String stackName) {
+      this.stackName = stackName;
+    }
+
+    public String getStackVersion() {
+      return stackVersion;
+    }
+
+    public void setStackVersion(String stackVersion) {
+      this.stackVersion = stackVersion;
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/a80066ae/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
index 81d4605..bc98c4c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
@@ -42,6 +42,7 @@ import org.apache.ambari.server.api.services.Request;
 import org.apache.ambari.server.api.services.StacksService.StackUriInfo;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorException;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest;
+import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorResponse;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRunner;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.commons.collections.CollectionUtils;
@@ -58,7 +59,7 @@ import org.codehaus.jackson.node.TextNode;
 /**
  * Parent for all commands.
  */
-public abstract class StackAdvisorCommand<T> extends BaseService {
+public abstract class StackAdvisorCommand<T extends StackAdvisorResponse> extends BaseService {
 
   /**
    * Type of response object provided by extending classes when
@@ -233,7 +234,7 @@ public abstract class StackAdvisorCommand<T> extends BaseService {
       String result = FileUtils.readFileToString(new File(requestDirectory, getResultFileName()));
 
       T response = this.mapper.readValue(result, this.type);
-      return updateResponse(request, response);
+      return updateResponse(request, setRequestId(response));
     } catch (Exception e) {
       String message = "Error occured during stack advisor command invocation";
       LOG.warn(message, e);
@@ -243,6 +244,11 @@ public abstract class StackAdvisorCommand<T> extends BaseService {
 
   protected abstract T updateResponse(StackAdvisorRequest request, T response);
 
+  private T setRequestId(T response) {
+    response.setId(requestId);
+    return response;
+  }
+
   /**
    * Create request id directory for each call
    */

http://git-wip-us.apache.org/repos/asf/ambari/blob/a80066ae/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/recommendations/RecommendationResponse.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/recommendations/RecommendationResponse.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/recommendations/RecommendationResponse.java
index a34291c..1796f9a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/recommendations/RecommendationResponse.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/recommendations/RecommendationResponse.java
@@ -21,15 +21,13 @@ package org.apache.ambari.server.api.services.stackadvisor.recommendations;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorResponse;
 import org.codehaus.jackson.annotate.JsonProperty;
 
 /**
  * Recommendation response POJO.
  */
-public class RecommendationResponse {
-
-  @JsonProperty("Versions")
-  private Version version;
+public class RecommendationResponse extends StackAdvisorResponse {
 
   @JsonProperty
   private Set<String> hosts;
@@ -40,14 +38,6 @@ public class RecommendationResponse {
   @JsonProperty
   private Recommendation recommendations;
 
-  public Version getVersion() {
-    return version;
-  }
-
-  public void setVersion(Version version) {
-    this.version = version;
-  }
-
   public Set<String> getHosts() {
     return hosts;
   }
@@ -72,30 +62,6 @@ public class RecommendationResponse {
     this.recommendations = recommendations;
   }
 
-  public static class Version {
-    @JsonProperty("stack_name")
-    private String stackName;
-
-    @JsonProperty("stack_version")
-    private String stackVersion;
-
-    public String getStackName() {
-      return stackName;
-    }
-
-    public void setStackName(String stackName) {
-      this.stackName = stackName;
-    }
-
-    public String getStackVersion() {
-      return stackVersion;
-    }
-
-    public void setStackVersion(String stackVersion) {
-      this.stackVersion = stackVersion;
-    }
-  }
-
   public static class Recommendation {
     @JsonProperty
     private Blueprint blueprint;

http://git-wip-us.apache.org/repos/asf/ambari/blob/a80066ae/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/validations/ValidationResponse.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/validations/ValidationResponse.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/validations/ValidationResponse.java
index 145bcd9..d6be483 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/validations/ValidationResponse.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/validations/ValidationResponse.java
@@ -20,28 +20,17 @@ package org.apache.ambari.server.api.services.stackadvisor.validations;
 
 import java.util.Set;
 
-import org.apache.ambari.server.api.services.stackadvisor.recommendations.RecommendationResponse.Version;
+import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorResponse;
 import org.codehaus.jackson.annotate.JsonProperty;
 
 /**
  * Validation response POJO.
  */
-public class ValidationResponse {
-
-  @JsonProperty("Versions")
-  private Version version;
+public class ValidationResponse extends StackAdvisorResponse {
 
   @JsonProperty
   private Set<ValidationItem> items;
 
-  public Version getVersion() {
-    return version;
-  }
-
-  public void setVersion(Version version) {
-    this.version = version;
-  }
-
   public Set<ValidationItem> getItems() {
     return items;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a80066ae/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RecommendationResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RecommendationResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RecommendationResourceProvider.java
index 3e5aef4..0463076 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RecommendationResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RecommendationResourceProvider.java
@@ -50,7 +50,7 @@ import org.apache.ambari.server.controller.utilities.PropertyHelper;
 public class RecommendationResourceProvider extends StackAdvisorResourceProvider {
 
   protected static final String RECOMMENDATION_ID_PROPERTY_ID = PropertyHelper.getPropertyId(
-      "Recommendations", "id");
+      "Recommendation", "id");
 
   protected static final String HOSTS_PROPERTY_ID = "hosts";
   protected static final String SERVICES_PROPERTY_ID = "services";
@@ -101,7 +101,7 @@ public class RecommendationResourceProvider extends StackAdvisorResourceProvider
       public Resource invoke() throws AmbariException {
 
         Resource resource = new ResourceImpl(Resource.Type.Recommendation);
-        setResourceProperty(resource, RECOMMENDATION_ID_PROPERTY_ID, "1", getPropertyIds());
+        setResourceProperty(resource, RECOMMENDATION_ID_PROPERTY_ID, response.getId(), getPropertyIds());
         setResourceProperty(resource, STACK_NAME_PROPERTY_ID, response.getVersion().getStackName(),
             getPropertyIds());
         setResourceProperty(resource, STACK_VERSION_PROPERTY_ID, response.getVersion()

http://git-wip-us.apache.org/repos/asf/ambari/blob/a80066ae/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ValidationResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ValidationResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ValidationResourceProvider.java
index bab1473..017abae 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ValidationResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ValidationResourceProvider.java
@@ -49,7 +49,7 @@ import org.apache.ambari.server.controller.utilities.PropertyHelper;
 public class ValidationResourceProvider extends StackAdvisorResourceProvider {
 
   protected static final String VALIDATION_ID_PROPERTY_ID = PropertyHelper.getPropertyId(
-      "Validations", "id");
+      "Validation", "id");
   protected static final String VALIDATE_PROPERTY_ID = "validate";
 
   protected static final String ITEMS_PROPERTY_ID = "items";
@@ -93,7 +93,7 @@ public class ValidationResourceProvider extends StackAdvisorResourceProvider {
       public Resource invoke() throws AmbariException {
 
         Resource resource = new ResourceImpl(Resource.Type.Validation);
-        setResourceProperty(resource, VALIDATION_ID_PROPERTY_ID, "1", getPropertyIds());
+        setResourceProperty(resource, VALIDATION_ID_PROPERTY_ID, response.getId(), getPropertyIds());
         setResourceProperty(resource, STACK_NAME_PROPERTY_ID, response.getVersion().getStackName(), getPropertyIds());
         setResourceProperty(resource, STACK_VERSION_PROPERTY_ID, response.getVersion().getStackVersion(), getPropertyIds());
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/a80066ae/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java
index cf792ff..1a19c46 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java
@@ -41,6 +41,7 @@ import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorException;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest.StackAdvisorRequestBuilder;
+import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorResponse;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRunner;
 import org.apache.ambari.server.api.services.stackadvisor.commands.StackAdvisorCommand.StackAdvisorData;
 import org.apache.commons.io.FileUtils;
@@ -150,7 +151,7 @@ public class StackAdvisorCommandTest {
     final String testResourceString = String.format("{\"type\": \"%s\"}", expected);
     final File recommendationsDir = temp.newFolder("recommendationDir");
     String stackAdvisorScript = "echo";
-    final int requestId = 0;
+    final int requestId = 2;
     StackAdvisorRunner saRunner = mock(StackAdvisorRunner.class);
     AmbariMetaInfo metaInfo = mock(AmbariMetaInfo.class);
     doReturn(Collections.emptyList()).when(metaInfo).getStackParentVersions(anyString(), anyString());
@@ -180,6 +181,7 @@ public class StackAdvisorCommandTest {
     TestResource result = command.invoke(request);
 
     assertEquals(expected, result.getType());
+    assertEquals(requestId, result.getId());
   }
 
   @Test
@@ -260,7 +262,7 @@ public class StackAdvisorCommandTest {
     }
   }
 
-  public static class TestResource {
+  public static class TestResource extends StackAdvisorResponse {
     @JsonProperty
     private String type;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/a80066ae/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RecommendationResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RecommendationResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RecommendationResourceProviderTest.java
new file mode 100644
index 0000000..1810efa
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RecommendationResourceProviderTest.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.controller.internal;
+
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class RecommendationResourceProviderTest {
+
+  @Test
+  public void testCreateResources() throws Exception {
+
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/a80066ae/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ValidationResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ValidationResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ValidationResourceProviderTest.java
new file mode 100644
index 0000000..cdbf2d7
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ValidationResourceProviderTest.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.controller.internal;
+
+import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorHelper;
+import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest;
+import org.apache.ambari.server.api.services.stackadvisor.validations.ValidationResponse;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.RequestStatus;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.junit.Test;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+
+import static org.apache.ambari.server.api.services.stackadvisor.StackAdvisorResponse.Version;
+import static org.mockito.Mockito.*;
+import static org.junit.Assert.*;
+
+public class ValidationResourceProviderTest {
+
+  @Test
+  public void testCreateResources_checkRequestId() throws Exception {
+    Map<Resource.Type, String> keyPropertyIds = Collections.emptyMap();
+    Set<String> propertyIds = Collections.singleton(ValidationResourceProvider.VALIDATION_ID_PROPERTY_ID);
+    AmbariManagementController ambariManagementController = mock(AmbariManagementController.class);
+    ValidationResourceProvider provider = spy(new ValidationResourceProvider(propertyIds,
+        keyPropertyIds, ambariManagementController));
+    StackAdvisorRequest stackAdvisorRequest = mock(StackAdvisorRequest.class);
+    Request request = mock(Request.class);
+    doReturn(stackAdvisorRequest).when(provider).prepareStackAdvisorRequest(request);
+
+    StackAdvisorHelper saHelper = mock(StackAdvisorHelper.class);
+
+    ValidationResponse response = mock(ValidationResponse.class);
+    Version version = mock(Version.class);
+    doReturn(3).when(response).getId();
+    doReturn(version).when(response).getVersion();
+    doReturn(response).when(saHelper).validate(any(StackAdvisorRequest.class));
+    ValidationResourceProvider.init(saHelper);
+
+    RequestStatus status = provider.createResources(request);
+
+    Set<Resource> associatedResources = status.getAssociatedResources();
+    assertNotNull(associatedResources);
+    assertEquals(1, associatedResources.size());
+    Resource resource = associatedResources.iterator().next();
+    Object requestId = resource.getPropertyValue(ValidationResourceProvider.VALIDATION_ID_PROPERTY_ID);
+    assertNotNull(requestId);
+    assertEquals(3, requestId);
+  }
+}
\ No newline at end of file


[13/35] git commit: AMBARI-6987. Ambari upgrade: resource management cleanup needs refactoring in install-helper.sh -> remove (vbrodetskyi)

Posted by jo...@apache.org.
AMBARI-6987. Ambari upgrade: resource management cleanup needs refactoring in install-helper.sh -> remove (vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/40f1c4e8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/40f1c4e8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/40f1c4e8

Branch: refs/heads/branch-alerts-dev
Commit: 40f1c4e8f35bc718681d28a93ea021a412070206
Parents: 574461d
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Tue Aug 26 16:39:55 2014 +0300
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Tue Aug 26 16:40:14 2014 +0300

----------------------------------------------------------------------
 ambari-agent/conf/unix/install-helper.sh  | 7 -------
 ambari-server/conf/unix/install-helper.sh | 3 ---
 2 files changed, 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/40f1c4e8/ambari-agent/conf/unix/install-helper.sh
----------------------------------------------------------------------
diff --git a/ambari-agent/conf/unix/install-helper.sh b/ambari-agent/conf/unix/install-helper.sh
index 497db8d..0be3d2d 100644
--- a/ambari-agent/conf/unix/install-helper.sh
+++ b/ambari-agent/conf/unix/install-helper.sh
@@ -51,13 +51,6 @@ do_install(){
 }
 
 do_remove(){
-  if [ -d "$COMMON_DIR" ]; then  # common dir exists
-    rm -f "$COMMON_DIR"
-  fi
-
-  if [ -d "$RESOURCE_MANAGEMENT_DIR" ]; then  # resource_management dir exists
-    rm -f "$RESOURCE_MANAGEMENT_DIR"
-  fi
 
   if [ -f "$PYTHON_WRAPER_TARGET" ]; then
     rm -f "$PYTHON_WRAPER_TARGET"

http://git-wip-us.apache.org/repos/asf/ambari/blob/40f1c4e8/ambari-server/conf/unix/install-helper.sh
----------------------------------------------------------------------
diff --git a/ambari-server/conf/unix/install-helper.sh b/ambari-server/conf/unix/install-helper.sh
index 30946a5..751bcd0 100644
--- a/ambari-server/conf/unix/install-helper.sh
+++ b/ambari-server/conf/unix/install-helper.sh
@@ -51,9 +51,6 @@ do_install(){
 
 do_remove(){
 
-  rm -rf "$COMMON_DIR"
-  rm -rf "$RESOURCE_MANAGEMENT_DIR"
-
   if [ -f "$PYTHON_WRAPER_TARGET" ]; then
     rm -f "$PYTHON_WRAPER_TARGET"
   fi