Posted to commits@ambari.apache.org by dm...@apache.org on 2014/05/08 18:07:13 UTC

[4/4] git commit: AMBARI-5705. Fail to install hive client with error: Fail: Execution of curl returned 22. (dlysnichenko)

AMBARI-5705. Fail to install hive client with error: Fail: Execution of curl returned 22. (dlysnichenko)
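
With -f/--fail, curl exits with code 22 when the HTTP response is an error (status 400 or above), which is the failure named in the JIRA title: on hosts configured to use an HTTP proxy, artifact downloads from the Ambari server (JDK, JCE policy zip, DBConnectionVerification.jar) evidently went through the proxy and came back as errors. The fix publishes the Ambari server hostname in clusterHostInfo and adds --noproxy to each of those curl calls. A minimal sketch of the resulting pattern, in Python like the hook scripts below (hostname and JDK artifact values taken from the test configs in this commit; the output path is illustrative):

    # params.py reads the host that StageUtils now publishes
    config = {"clusterHostInfo": {"ambari_server_host": ["c6401.ambari.apache.org"]}}
    ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
    # the hooks splice it into each download so the request bypasses any proxy
    cmd = ("curl -kf --noproxy %s --retry 10 "
           "http://%s:8080/resources/jdk-7u45-linux-x64.tar.gz -o /tmp/jdk.tar.gz"
           % (ambari_server_hostname, ambari_server_hostname))
    print(cmd)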


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4f5478d1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4f5478d1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4f5478d1

Branch: refs/heads/branch-1.6.0
Commit: 4f5478d10c50d1a8b830034ab1c879a369189e24
Parents: cfc7d33
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Tue May 6 21:05:30 2014 +0300
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Thu May 8 19:06:33 2014 +0300

----------------------------------------------------------------------
 .../apache/ambari/server/utils/StageUtils.java  |  33 +-
 .../hooks/before-INSTALL/scripts/params.py      |   1 +
 .../scripts/shared_initialization.py            |  26 +-
 .../1.3.2/hooks/before-START/scripts/params.py  |   1 +
 .../scripts/shared_initialization.py            |   6 +-
 .../1.3.2/services/HIVE/package/scripts/hive.py |   6 +-
 .../services/HIVE/package/scripts/params.py     |   1 +
 .../NAGIOS/package/files/check_namenodes_ha.sh  |   2 +-
 .../package/files/check_nodemanager_health.sh   |   2 +-
 .../package/files/check_templeton_status.sh     |   2 +-
 .../NAGIOS/package/files/check_webui.sh         |  11 +-
 .../services/OOZIE/package/scripts/oozie.py     |   3 +-
 .../services/OOZIE/package/scripts/params.py    |   1 +
 .../WEBHCAT/package/files/templetonSmoke.sh     |   6 +-
 .../hooks/before-INSTALL/scripts/params.py      |   1 +
 .../scripts/shared_initialization.py            |  25 +-
 .../2.0.6/hooks/before-START/scripts/params.py  |   1 +
 .../scripts/shared_initialization.py            |   6 +-
 .../2.0.6/services/HIVE/package/scripts/hive.py |   6 +-
 .../services/HIVE/package/scripts/params.py     |   1 +
 .../NAGIOS/package/files/check_namenodes_ha.sh  |   2 +-
 .../package/files/check_nodemanager_health.sh   |   2 +-
 .../package/files/check_templeton_status.sh     |   2 +-
 .../NAGIOS/package/files/check_webui.sh         |  11 +-
 .../services/OOZIE/package/scripts/oozie.py     |   3 +-
 .../services/OOZIE/package/scripts/params.py    |   1 +
 .../WEBHCAT/package/files/templetonSmoke.sh     |   6 +-
 .../ambari/server/utils/TestStageUtils.java     |  37 +-
 .../stacks/1.3.2/HIVE/test_hive_client.py       |   4 +-
 .../stacks/1.3.2/HIVE/test_hive_metastore.py    |   4 +-
 .../stacks/1.3.2/HIVE/test_hive_server.py       |   4 +-
 .../1.3.2/configs/default.hbasedecom.json       |   3 +
 .../python/stacks/1.3.2/configs/default.json    |   3 +
 .../1.3.2/configs/default.non_gmetad_host.json  |   3 +
 .../python/stacks/1.3.2/configs/secured.json    |   3 +
 .../1.3.2/configs/secured_no_jce_name.json      | 600 ++++++++++++++
 .../hooks/before-INSTALL/test_before_install.py |  21 +-
 .../stacks/2.0.6/HIVE/test_hive_client.py       |   4 +-
 .../stacks/2.0.6/HIVE/test_hive_metastore.py    |   4 +-
 .../stacks/2.0.6/HIVE/test_hive_server.py       |   4 +-
 .../2.0.6/configs/default.hbasedecom.json       |   3 +
 .../python/stacks/2.0.6/configs/default.json    |   3 +
 .../2.0.6/configs/default.non_gmetad_host.json  |   3 +
 .../stacks/2.0.6/configs/flume_target.json      |   3 +
 .../python/stacks/2.0.6/configs/ha_default.json |   3 +
 .../python/stacks/2.0.6/configs/ha_secured.json |   3 +
 .../python/stacks/2.0.6/configs/secured.json    |   3 +
 .../2.0.6/configs/secured_no_jce_name.json      | 787 +++++++++++++++++++
 .../hooks/before-INSTALL/test_before_install.py |  18 +-
 .../stacks/2.1/HIVE/test_hive_metastore.py      |   4 +-
 .../test/python/stacks/2.1/configs/default.json |   3 +
 .../test/python/stacks/2.1/configs/secured.json |   3 +
 52 files changed, 1613 insertions(+), 85 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java b/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java
index f8b9636..767a97d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java
@@ -24,6 +24,7 @@ import org.apache.ambari.server.Role;
 import org.apache.ambari.server.RoleCommand;
 import org.apache.ambari.server.actionmanager.Stage;
 import org.apache.ambari.server.agent.ExecutionCommand;
+import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.HostComponentAdminState;
@@ -60,9 +61,9 @@ import java.util.TreeSet;
 
 public class StageUtils {
 
-
   public static final Integer DEFAULT_PING_PORT = 8670;
   private static final Log LOG = LogFactory.getLog(StageUtils.class);
+  static final String AMBARI_SERVER_HOST = "ambari_server_host";
   private static final String HOSTS_LIST = "all_hosts";
   private static final String PORTS = "all_ping_ports";
   private static Map<String, String> componentToClusterInfoKeyMap =
@@ -71,6 +72,16 @@ public class StageUtils {
       new HashMap<String, String>();
   private volatile static Gson gson;
 
+  private static String server_hostname;
+  static {
+    try {
+      server_hostname = InetAddress.getLocalHost().getCanonicalHostName();
+    } catch (UnknownHostException e) {
+      LOG.warn("Could not find canonical hostname ", e);
+      server_hostname = "localhost";
+    }
+  }
+
   public static Gson getGson() {
     if (gson != null) {
       return gson;
@@ -224,7 +235,14 @@ public class StageUtils {
 
     List<String> hostsList = new ArrayList<String>(hostsSet);
 
-    //Fill host roles
+    //     Fill host roles
+    // Fill server host
+    TreeSet<Integer> serverHost = new TreeSet<Integer>();
+    int hostIndex = hostsList.indexOf(getHostName());
+    serverHost.add(hostIndex);
+    hostRolesInfo.put(AMBARI_SERVER_HOST, serverHost);
+
+    // Fill hosts for services
     for (Entry<String, Service> serviceEntry : cluster.getServices().entrySet()) {
 
       Service service = serviceEntry.getValue();
@@ -255,7 +273,7 @@ public class StageUtils {
               hostRolesInfo.put(roleName, hostsForComponentsHost);
             }
 
-            int hostIndex = hostsList.indexOf(hostName);
+            hostIndex = hostsList.indexOf(hostName);
             //Add index of host to current host role
             hostsForComponentsHost.add(hostIndex);
           }
@@ -270,7 +288,7 @@ public class StageUtils {
                 hostRolesInfo.put(decomRoleName, hostsForComponentsHost);
               }
 
-              int hostIndex = hostsList.indexOf(hostName);
+              hostIndex = hostsList.indexOf(hostName);
               //Add index of host to current host role
               hostsForComponentsHost.add(hostIndex);
             }
@@ -377,11 +395,6 @@ public class StageUtils {
   }
 
   public static String getHostName() {
-    try {
-      return InetAddress.getLocalHost().getCanonicalHostName();
-    } catch (UnknownHostException e) {
-      LOG.warn("Could not find canonical hostname ", e);
-      return "localhost";
-    }
+    return server_hostname;
   }
 }
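
Two things change in StageUtils: the canonical hostname is now resolved once, in a static initializer, with getHostName() returning the cached value (which also lets TestStageUtils below partially mock the static method), and getClusterHostInfo() gains an ambari_server_host entry recorded, like the role maps, as the host's index into all_hosts. A sketch of the round trip the new test asserts, with hypothetical hosts:

    all_hosts = ["h1", "h2", "h3"]             # the all_hosts list sent to agents
    server_host_index = all_hosts.index("h1")  # what the server stores: 0
    # after decompression on the agent side, params.py sees the hostname:
    ambari_server_host = [all_hosts[server_host_index]]  # ["h1"]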

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
index f2a4199..7c5ac6e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
@@ -89,6 +89,7 @@ mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.gro
 
 #hosts
 hostname = config["hostname"]
+ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
 rm_host = default("/clusterHostInfo/rm_host", [])
 slave_hosts = default("/clusterHostInfo/slave_hosts", [])
 hagios_server_hosts = default("/clusterHostInfo/nagios_server_host", [])

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
index 98b5dea..1c68216 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
@@ -114,7 +114,9 @@ def setup_java():
   if not params.jdk_name:
     return
 
-  Execute(format("mkdir -p {artifact_dir} ; curl -kf --retry 10 {jdk_location}/{jdk_name} -o {jdk_curl_target}"),
+  Execute(format("mkdir -p {artifact_dir} ; curl -kf "
+                 "--noproxy {ambari_server_hostname} "
+                 "--retry 10 {jdk_location}/{jdk_name} -o {jdk_curl_target}"),
           path = ["/bin","/usr/bin/"],
           not_if = format("test -e {java_exec}"))
 
@@ -127,13 +129,21 @@ def setup_java():
           path = ["/bin","/usr/bin/"],
           not_if = format("test -e {java_exec}")
   )
-  jce_curl_target = format("{artifact_dir}/{jce_policy_zip}")
-  download_jce = format("mkdir -p {artifact_dir}; curl -kf --retry 10 {jce_location}/{jce_policy_zip} -o {jce_curl_target}")
-  Execute( download_jce,
-           path = ["/bin","/usr/bin/"],
-           not_if =format("test -e {jce_curl_target}"),
-           ignore_failures = True
-  )
+
+  if params.jce_policy_zip is not None:
+    jce_curl_target = format("{artifact_dir}/{jce_policy_zip}")
+    download_jce = format("mkdir -p {artifact_dir}; curl -kf "
+                          "--noproxy {ambari_server_hostname} "
+                          "--retry 10 {jce_location}/{jce_policy_zip} "
+                          "-o {jce_curl_target}")
+    Execute( download_jce,
+             path = ["/bin","/usr/bin/"],
+             not_if =format("test -e {jce_curl_target}"),
+             ignore_failures = True
+    )
+  elif params.security_enabled:
+    # Something weird is happening
+    raise Fail("Security is enabled, but JCE policy zip is not specified.")
 
   if params.security_enabled:
     security_dir = format("{java_home}/jre/lib/security")
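
The JCE download is also no longer unconditional: it runs only when jce_policy_zip is set, and a secured cluster without one now fails fast with a clear message instead of dying later inside curl. The new secured_no_jce_name.json test configs added in this commit exercise that branch. Restated as a standalone sketch (Fail stands in for the resource_management exception of the same name):

    class Fail(Exception):
        pass

    def check_jce(jce_policy_zip, security_enabled):
        if jce_policy_zip is not None:
            pass  # download via curl -kf --noproxy ... --retry 10; failures tolerated
        elif security_enabled:
            raise Fail("Security is enabled, but JCE policy zip is not specified.")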

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
index 61e04f8..17b884c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
@@ -39,6 +39,7 @@ snmp_community = "hadoop"
 
 #hosts
 hostname = config["hostname"]
+ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
 rm_host = default("/clusterHostInfo/rm_host", [])
 slave_hosts = default("/clusterHostInfo/slave_hosts", [])
 hagios_server_hosts = default("/clusterHostInfo/nagios_server_host", [])

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py
index 35f22ba..3b4a7fc 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py
@@ -90,10 +90,12 @@ def setup_database():
   db_driver_dload_cmd = ""
   if params.server_db_name == 'oracle' and params.oracle_driver_url != "":
     db_driver_dload_cmd = format(
-      "curl -kf --retry 5 {oracle_driver_url} -o {hadoop_lib_home}/{db_driver_filename}")
+      "curl -kf --noproxy {ambari_server_hostname} --retry 5 {oracle_driver_url}"
+      " -o {hadoop_lib_home}/{db_driver_filename}")
   elif params.server_db_name == 'mysql' and params.mysql_driver_url != "":
     db_driver_dload_cmd = format(
-      "curl -kf --retry 5 {mysql_driver_url} -o {hadoop_lib_home}/{db_driver_filename}")
+      "curl -kf --noproxy {ambari_server_hostname} --retry 5 {mysql_driver_url} "
+      "-o {hadoop_lib_home}/{db_driver_filename}")
 
   if db_driver_dload_cmd:
     Execute(db_driver_dload_cmd,

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py
index 5f2f15e..276410b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py
@@ -61,8 +61,10 @@ def hive(name=None):
             mode=config_file_mode
   )
 
-  cmd = format("/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 "
-               "{jdk_location}{check_db_connection_jar_name} -o {check_db_connection_jar_name}'")
+  cmd = format("/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf "
+               "--noproxy {ambari_server_hostname} --retry 5 "
+               "{jdk_location}{check_db_connection_jar_name} "
+               "-o {check_db_connection_jar_name}'")
 
   Execute(cmd,
           not_if=format("[ -f {check_db_connection_jar_name}]"))

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
index bf52dba..1f21780 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
@@ -48,6 +48,7 @@ check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar
 #common
 hive_metastore_port = config['configurations']['global']['hive_metastore_port']
 hive_var_lib = '/var/lib/hive'
+ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
 hive_server_host = config['clusterHostInfo']['hive_server_host'][0]
 hive_server_port = 10000
 hive_url = format("jdbc:hive2://{hive_server_host}:{hive_server_port}")

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/files/check_namenodes_ha.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/files/check_namenodes_ha.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/files/check_namenodes_ha.sh
index 50b075a..a0b348e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/files/check_namenodes_ha.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/files/check_namenodes_ha.sh
@@ -29,7 +29,7 @@ unavailableNN=()
 
 for nn in "${namenodes[@]}"
 do
-  status=$(curl -m 5 -s http://$nn:$port/jmx?qry=Hadoop:service=NameNode,name=FSNamesystem | grep -i "tag.HAState" | grep -o -E "standby|active")
+  status=$(curl --noproxy $nn -m 5 -s http://$nn:$port/jmx?qry=Hadoop:service=NameNode,name=FSNamesystem | grep -i "tag.HAState" | grep -o -E "standby|active")
   if [ "$status" == "active" ]; then
     activeNN[${#activeNN[*]}]="$nn"
   elif [ "$status" == "standby" ]; then

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/files/check_nodemanager_health.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/files/check_nodemanager_health.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/files/check_nodemanager_health.sh
index 020b41d..5b2a93f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/files/check_nodemanager_health.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/files/check_nodemanager_health.sh
@@ -35,7 +35,7 @@ if [[ "$SEC_ENABLED" == "true" ]]; then
   fi
 fi
 
-RESPONSE=`curl --negotiate -u : -s $NODEMANAGER_URL`
+RESPONSE=`curl --noproxy $HOST --negotiate -u : -s $NODEMANAGER_URL`
 if [[ "$RESPONSE" == *'"nodeHealthy":true'* ]]; then 
   echo "OK: NodeManager healthy";
   exit 0;

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/files/check_templeton_status.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/files/check_templeton_status.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/files/check_templeton_status.sh
index 7fbc4c4..6aa944b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/files/check_templeton_status.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/files/check_templeton_status.sh
@@ -35,7 +35,7 @@ if [[ "$SEC_ENABLED" == "true" ]]; then
   fi
 fi
 regex="^.*\"status\":\"ok\".*<status_code:200>$"
-out=`curl --negotiate -u : -s -w '<status_code:%{http_code}>' http://$HOST:$PORT/templeton/$VERSION/status 2>&1`
+out=`curl --noproxy $HOST --negotiate -u : -s -w '<status_code:%{http_code}>' http://$HOST:$PORT/templeton/$VERSION/status 2>&1`
 if [[ $out =~ $regex ]]; then
   out=`echo "$out" | sed -e 's/{/[/g' | sed -e 's/}/]/g'` 
   echo "OK: WebHCat Server status [$out]";

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/files/check_webui.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/files/check_webui.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/files/check_webui.sh
index b23045e..2be3388 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/files/check_webui.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/files/check_webui.sh
@@ -19,16 +19,17 @@
 # under the License.
 #
 #
-checkurl () {
-  url=$1
-  curl $url -o /dev/null
-  echo $?
-}
 
 service=$1
 host=$2
 port=$3
 
+checkurl () {
+  url=$1
+  curl $url --noproxy $host -o /dev/null
+  echo $?
+}
+
 if [[ -z "$service" || -z "$host" ]]; then
   echo "UNKNOWN: Invalid arguments; Usage: check_webui.sh service_name host_name";
   exit 3;

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py
index 6523fbd..4ab267a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py
@@ -66,7 +66,8 @@ def oozie(is_server=False
      params.jdbc_driver_name == "org.postgresql.Driver" or \
      params.jdbc_driver_name == "oracle.jdbc.driver.OracleDriver":
     Execute(format("/bin/sh -c 'cd /usr/lib/ambari-agent/ &&\
-    curl -kf --retry 5 {jdk_location}{check_db_connection_jar_name}\
+    curl -kf --noproxy {ambari_server_hostname} \
+    --retry 5 {jdk_location}{check_db_connection_jar_name}\
      -o {check_db_connection_jar_name}'"),
       not_if  = format("[ -f {check_db_connection_jar} ]")
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
index bb6e27c..cd1219e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
@@ -24,6 +24,7 @@ import status_params
 # server configurations
 config = Script.get_config()
 
+ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
 oozie_user = config['configurations']['global']['oozie_user']
 smokeuser = config['configurations']['global']['smokeuser']
 conf_dir = "/etc/oozie/conf"

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/files/templetonSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/files/templetonSmoke.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/files/templetonSmoke.sh
index cefc4f0..6ec9a0f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/files/templetonSmoke.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/files/templetonSmoke.sh
@@ -33,7 +33,7 @@ else
   kinitcmd=""
 fi
 
-cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>'    $ttonurl/status 2>&1"
+cmd="${kinitcmd}curl --noproxy ${ttonhost} --negotiate -u : -s -w 'http_code <%{http_code}>'    $ttonurl/status 2>&1"
 retVal=`su - ${smoke_test_user} -c "$cmd"`
 httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
 
@@ -47,7 +47,7 @@ exit 0
 
 #try hcat ddl command
 echo "user.name=${smoke_test_user}&exec=show databases;" /tmp/show_db.post.txt
-cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>' -d  \@${destdir}/show_db.post.txt  $ttonurl/ddl 2>&1"
+cmd="${kinitcmd}curl --noproxy ${ttonhost} --negotiate -u : -s -w 'http_code <%{http_code}>' -d  \@${destdir}/show_db.post.txt  $ttonurl/ddl 2>&1"
 retVal=`su - ${smoke_test_user} -c "$cmd"`
 httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
 
@@ -83,7 +83,7 @@ su - ${smoke_test_user} -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput
 echo -n "user.name=${smoke_test_user}&file=/tmp/$ttonTestScript" > /tmp/pig_post.txt
 
 #submit pig query
-cmd="curl -s -w 'http_code <%{http_code}>' -d  \@${destdir}/pig_post.txt  $ttonurl/pig 2>&1"
+cmd="curl --noproxy ${ttonhost} -s -w 'http_code <%{http_code}>' -d  \@${destdir}/pig_post.txt  $ttonurl/pig 2>&1"
 retVal=`su - ${smoke_test_user} -c "$cmd"`
 httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
 if [[ "$httpExitCode" -ne "200" ]] ; then
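
Note that in the Nagios checks and this smoke test, --noproxy names the host being probed ($nn, $HOST, ${ttonhost}) rather than the Ambari server, so each health check reaches its endpoint directly no matter what proxy the machine is configured with. The same pattern with illustrative values (WebHCat host and default port assumed):

    host, port = "c6402.ambari.apache.org", 50111
    cmd = ("curl --noproxy %s --negotiate -u : -s -w 'http_code <%%{http_code}>' "
           "http://%s:%d/templeton/v1/status" % (host, host, port))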

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
index 1f8bfa8..eeb21e8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
@@ -49,6 +49,7 @@ mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.gro
 
 #hosts
 hostname = config["hostname"]
+ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
 rm_host = default("/clusterHostInfo/rm_host", [])
 slave_hosts = default("/clusterHostInfo/slave_hosts", [])
 hagios_server_hosts = default("/clusterHostInfo/nagios_server_host", [])

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
index 0ae8b19..d7e952a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
@@ -132,7 +132,9 @@ def setup_java():
   if not params.jdk_name:
     return
 
-  Execute(format("mkdir -p {artifact_dir} ; curl -kf --retry 10 {jdk_location}/{jdk_name} -o {jdk_curl_target}"),
+  Execute(format("mkdir -p {artifact_dir} ; '\
+  curl --noproxy {ambari_server_hostname} -kf \
+  --retry 10 {jdk_location}/{jdk_name} -o {jdk_curl_target}"),
           path = ["/bin","/usr/bin/"],
           not_if = format("test -e {java_exec}"))
 
@@ -145,13 +147,20 @@ def setup_java():
           path = ["/bin","/usr/bin/"],
           not_if = format("test -e {java_exec}")
   )
-  jce_curl_target = format("{artifact_dir}/{jce_policy_zip}")
-  download_jce = format("mkdir -p {artifact_dir}; curl -kf --retry 10 {jce_location}/{jce_policy_zip} -o {jce_curl_target}")
-  Execute( download_jce,
-           path = ["/bin","/usr/bin/"],
-           not_if =format("test -e {jce_curl_target}"),
-           ignore_failures = True
-  )
+
+  if params.jce_policy_zip is not None:
+    jce_curl_target = format("{artifact_dir}/{jce_policy_zip}")
+    download_jce = format("mkdir -p {artifact_dir}; \
+    curl --noproxy {ambari_server_hostname} -kf --retry 10 \
+    {jce_location}/{jce_policy_zip} -o {jce_curl_target}")
+    Execute( download_jce,
+             path = ["/bin","/usr/bin/"],
+             not_if =format("test -e {jce_curl_target}"),
+             ignore_failures = True
+    )
+  elif params.security_enabled:
+    # Something weird is happening
+    raise Fail("Security is enabled, but JCE policy zip is not specified.")
 
   if params.security_enabled:
     security_dir = format("{java_home}/jre/lib/security")

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index 8b342f7..889f9fb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -41,6 +41,7 @@ snmp_community = "hadoop"
 
 #hosts
 hostname = config["hostname"]
+ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
 rm_host = default("/clusterHostInfo/rm_host", [])
 slave_hosts = default("/clusterHostInfo/slave_hosts", [])
 hagios_server_hosts = default("/clusterHostInfo/nagios_server_host", [])

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
index f71b76c..804331d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
@@ -100,10 +100,12 @@ def setup_database():
   db_driver_dload_cmd = ""
   if params.server_db_name == 'oracle' and params.oracle_driver_url != "":
     db_driver_dload_cmd = format(
-      "curl -kf --retry 5 {oracle_driver_url} -o {hadoop_lib_home}/{db_driver_filename}")
+      "curl --noproxy {ambari_server_hostname} -kf \
+      --retry 5 {oracle_driver_url} -o {hadoop_lib_home}/{db_driver_filename}")
   elif params.server_db_name == 'mysql' and params.mysql_driver_url != "":
     db_driver_dload_cmd = format(
-      "curl -kf --retry 5 {mysql_driver_url} -o {hadoop_lib_home}/{db_driver_filename}")
+      "curl --noproxy {ambari_server_hostname} -kf \
+      --retry 5 {mysql_driver_url} -o {hadoop_lib_home}/{db_driver_filename}")
 
   if db_driver_dload_cmd:
     Execute(db_driver_dload_cmd,

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
index 42a8610..1445b24 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
@@ -67,7 +67,8 @@ def hive(name=None):
             group=params.user_group,
             mode=config_file_mode)
 
-  cmd = format("/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 "
+  cmd = format("/bin/sh -c 'cd /usr/lib/ambari-agent/ && "
+               "curl --noproxy {ambari_server_hostname} -kf --retry 5 "
                "{jdk_location}{check_db_connection_jar_name} -o {check_db_connection_jar_name}'")
 
   Execute(cmd,
@@ -186,7 +187,8 @@ def jdbc_connector():
 
   elif params.hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
     cmd = format(
-      "mkdir -p {artifact_dir} ; curl -kf --retry 10 {driver_curl_source} -o {driver_curl_target} &&  "
+      "mkdir -p {artifact_dir} ; curl --noproxy {ambari_server_hostname} "
+      "-kf --retry 10 {driver_curl_source} -o {driver_curl_target} &&  "
       "cp {driver_curl_target} {target}")
 
     Execute(cmd,

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
index ab4aee7..5ab1753 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
@@ -50,6 +50,7 @@ check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar
 hdp_stack_version = config['hostLevelParams']['stack_version']
 hive_metastore_port = config['configurations']['global']['hive_metastore_port']
 hive_var_lib = '/var/lib/hive'
+ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
 hive_bin = '/usr/lib/hive/bin'
 hive_server_host = config['clusterHostInfo']['hive_server_host'][0]
 hive_server_port = 10000

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_namenodes_ha.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_namenodes_ha.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_namenodes_ha.sh
index 50b075a..a0b348e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_namenodes_ha.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_namenodes_ha.sh
@@ -29,7 +29,7 @@ unavailableNN=()
 
 for nn in "${namenodes[@]}"
 do
-  status=$(curl -m 5 -s http://$nn:$port/jmx?qry=Hadoop:service=NameNode,name=FSNamesystem | grep -i "tag.HAState" | grep -o -E "standby|active")
+  status=$(curl --noproxy $nn -m 5 -s http://$nn:$port/jmx?qry=Hadoop:service=NameNode,name=FSNamesystem | grep -i "tag.HAState" | grep -o -E "standby|active")
   if [ "$status" == "active" ]; then
     activeNN[${#activeNN[*]}]="$nn"
   elif [ "$status" == "standby" ]; then

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_nodemanager_health.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_nodemanager_health.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_nodemanager_health.sh
index 020b41d..5b2a93f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_nodemanager_health.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_nodemanager_health.sh
@@ -35,7 +35,7 @@ if [[ "$SEC_ENABLED" == "true" ]]; then
   fi
 fi
 
-RESPONSE=`curl --negotiate -u : -s $NODEMANAGER_URL`
+RESPONSE=`curl --noproxy $HOST --negotiate -u : -s $NODEMANAGER_URL`
 if [[ "$RESPONSE" == *'"nodeHealthy":true'* ]]; then 
   echo "OK: NodeManager healthy";
   exit 0;

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_templeton_status.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_templeton_status.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_templeton_status.sh
index 7fbc4c4..6aa944b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_templeton_status.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_templeton_status.sh
@@ -35,7 +35,7 @@ if [[ "$SEC_ENABLED" == "true" ]]; then
   fi
 fi
 regex="^.*\"status\":\"ok\".*<status_code:200>$"
-out=`curl --negotiate -u : -s -w '<status_code:%{http_code}>' http://$HOST:$PORT/templeton/$VERSION/status 2>&1`
+out=`curl --noproxy $HOST --negotiate -u : -s -w '<status_code:%{http_code}>' http://$HOST:$PORT/templeton/$VERSION/status 2>&1`
 if [[ $out =~ $regex ]]; then
   out=`echo "$out" | sed -e 's/{/[/g' | sed -e 's/}/]/g'` 
   echo "OK: WebHCat Server status [$out]";

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_webui.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_webui.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_webui.sh
index 8116dc5..a40fdf1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_webui.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_webui.sh
@@ -19,16 +19,17 @@
 # under the License.
 #
 #
-checkurl () {
-  url=$1
-  curl $url -o /dev/null
-  echo $?
-}
 
 service=$1
 host=$2
 port=$3
 
+checkurl () {
+  url=$1
+  curl $url --noproxy $host -o /dev/null
+  echo $?
+}
+
 if [[ -z "$service" || -z "$host" ]]; then
   echo "UNKNOWN: Invalid arguments; Usage: check_webui.sh service_name host_name";
   exit 3;

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie.py
index 818874d..6feb7b1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie.py
@@ -68,7 +68,8 @@ def oozie(is_server=False # TODO: see if see can remove this
      params.jdbc_driver_name == "org.postgresql.Driver" or \
      params.jdbc_driver_name == "oracle.jdbc.driver.OracleDriver":
     Execute(format("/bin/sh -c 'cd /usr/lib/ambari-agent/ &&\
-    curl -kf --retry 5 {jdk_location}{check_db_connection_jar_name}\
+    curl --noproxy {ambari_server_hostname} \
+    -kf --retry 5 {jdk_location}{check_db_connection_jar_name}\
      -o {check_db_connection_jar_name}'"),
       not_if  = format("[ -f {check_db_connection_jar} ]")
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
index 9a5df00..2a2b507 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
@@ -75,6 +75,7 @@ else:
   jdbc_driver_jar = ""
 
 hostname = config["hostname"]
+ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
 falcon_host = default("/clusterHostInfo/falcon_server_hosts", [])
 has_falcon_host = not len(falcon_host)  == 0
 falcon_home = '/usr/lib/falcon'

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/files/templetonSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/files/templetonSmoke.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/files/templetonSmoke.sh
index 304b459..f27503b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/files/templetonSmoke.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/files/templetonSmoke.sh
@@ -33,7 +33,7 @@ else
   kinitcmd=""
 fi
 
-cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>'    $ttonurl/status 2>&1"
+cmd="${kinitcmd}curl --noproxy ${ttonhost} --negotiate -u : -s -w 'http_code <%{http_code}>'    $ttonurl/status 2>&1"
 retVal=`su - ${smoke_test_user} -c "$cmd"`
 httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
 
@@ -47,7 +47,7 @@ exit 0
 
 #try hcat ddl command
 echo "user.name=${smoke_test_user}&exec=show databases;" /tmp/show_db.post.txt
-cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>' -d  \@${destdir}/show_db.post.txt  $ttonurl/ddl 2>&1"
+cmd="${kinitcmd}curl --noproxy ${ttonhost} --negotiate -u : -s -w 'http_code <%{http_code}>' -d  \@${destdir}/show_db.post.txt  $ttonurl/ddl 2>&1"
 retVal=`su - ${smoke_test_user} -c "$cmd"`
 httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
 
@@ -83,7 +83,7 @@ su - ${smoke_test_user} -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput
 echo -n "user.name=${smoke_test_user}&file=/tmp/$ttonTestScript" > /tmp/pig_post.txt
 
 #submit pig query
-cmd="curl -s -w 'http_code <%{http_code}>' -d  \@${destdir}/pig_post.txt  $ttonurl/pig 2>&1"
+cmd="curl --noproxy ${ttonhost} -s -w 'http_code <%{http_code}>' -d  \@${destdir}/pig_post.txt  $ttonurl/pig 2>&1"
 retVal=`su - ${smoke_test_user} -c "$cmd"`
 httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
 if [[ "$httpExitCode" -ne "200" ]] ; then

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/test/java/org/apache/ambari/server/utils/TestStageUtils.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/utils/TestStageUtils.java b/ambari-server/src/test/java/org/apache/ambari/server/utils/TestStageUtils.java
index ee93431..7bfe605 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/utils/TestStageUtils.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/utils/TestStageUtils.java
@@ -17,6 +17,7 @@
  */
 package org.apache.ambari.server.utils;
 
+import static org.easymock.EasyMock.expect;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
@@ -57,6 +58,13 @@ import org.codehaus.jackson.JsonGenerationException;
 import org.codehaus.jackson.map.JsonMappingException;
 import org.junit.Before;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+import static org.powermock.api.easymock.PowerMock.replayAll;
+import java.net.InetAddress;
+import static org.powermock.api.easymock.PowerMock.*;
 
 import com.google.common.collect.ContiguousSet;
 import com.google.common.collect.DiscreteDomain;
@@ -64,6 +72,9 @@ import com.google.common.collect.Range;
 import com.google.inject.Guice;
 import com.google.inject.Injector;
 
+@RunWith(PowerMockRunner.class)
+@PrepareForTest(StageUtils.class)
+@PowerMockIgnore("javax.management.*")
 public class TestStageUtils {
   private static final String HOSTS_LIST = "all_hosts";
 
@@ -151,9 +162,10 @@ public class TestStageUtils {
   @Test
   public void testGetClusterHostInfo() throws AmbariException, UnknownHostException {
     Clusters fsm = injector.getInstance(Clusters.class);
-    
+    String h1 = "h1";
+
     List<String> hostList = new ArrayList<String>();
-    hostList.add("h1");
+    hostList.add(h1);
     hostList.add("h2");
     hostList.add("h3");
     hostList.add("h4");
@@ -163,7 +175,11 @@ public class TestStageUtils {
     hostList.add("h8");
     hostList.add("h9");
     hostList.add("h10");
-    
+
+    mockStaticPartial(StageUtils.class, "getHostName");
+    expect(StageUtils.getHostName()).andReturn(h1).anyTimes();
+    replayAll();
+
     List<Integer> pingPorts = Arrays.asList(StageUtils.DEFAULT_PING_PORT,
         StageUtils.DEFAULT_PING_PORT,
         StageUtils.DEFAULT_PING_PORT,
@@ -232,9 +248,11 @@ public class TestStageUtils {
         StageUtils.getClusterHostInfo(fsm.getHostsForCluster("c1"), fsm.getCluster("c1"));
 
     //All hosts present in cluster host info
-    assertEquals(fsm.getHosts().size(), info.get(HOSTS_LIST).size());
+    Set<String> allHosts = info.get(HOSTS_LIST);
+    ArrayList<String> allHostsList = new ArrayList<String>(allHosts);
+    assertEquals(fsm.getHosts().size(), allHosts.size());
     for (Host host: fsm.getHosts()) {
-      assertTrue(info.get(HOSTS_LIST).contains(host.getHostName()));
+      assertTrue(allHosts.contains(host.getHostName()));
     }
     
     
@@ -267,7 +285,7 @@ public class TestStageUtils {
     
     List<Integer> pingPortsActual = getRangeMappedDecompressedSet(actualPingPorts);
 
-    List<Integer> reindexedPorts = getReindexedList(pingPortsActual, new ArrayList<String>(info.get(HOSTS_LIST)), hostList);
+    List<Integer> reindexedPorts = getReindexedList(pingPortsActual, new ArrayList<String>(allHosts), hostList);
     
     //Treat null values
     while (pingPorts.contains(null)) {
@@ -282,6 +300,13 @@ public class TestStageUtils {
     assertTrue(info.containsKey("decom_tt_hosts"));
     Set<String> decommissionedHosts = info.get("decom_tt_hosts");
     assertEquals(2, decommissionedHosts.toString().split(",").length);
+
+    // check server hostname field
+    assertTrue(info.containsKey(StageUtils.AMBARI_SERVER_HOST));
+    Set<String> serverHost = info.get(StageUtils.AMBARI_SERVER_HOST);
+    assertEquals(1, serverHost.toArray().length);
+    int serverHostIndex = Integer.valueOf(serverHost.iterator().next());
+    assertEquals(h1, allHostsList.get(serverHostIndex));
   }
 
   private void checkServiceCompression(Map<String, Set<String>> info,

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_client.py b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_client.py
index 80a3c5c..98f6321 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_client.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_client.py
@@ -40,7 +40,7 @@ class TestHiveClient(RMFTestCase):
       conf_dir = '/etc/hive/conf',
       configurations = self.getConfig()['configurations']['hive-site'],
     )
-    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --noproxy c6401.ambari.apache.org --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
       not_if = '[ -f DBConnectionVerification.jar]',
     )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh',
@@ -92,7 +92,7 @@ class TestHiveClient(RMFTestCase):
       conf_dir = '/etc/hive/conf',
       configurations = self.getConfig()['configurations']['hive-site'],
     )
-    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+    self.assertResourceCalled('Execute', u"/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --noproxy c6401.ambari.apache.org --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
       not_if = '[ -f DBConnectionVerification.jar]',
     )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh',

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_metastore.py
index 3914844..1366520 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_metastore.py
@@ -176,7 +176,7 @@ class TestHiveMetastore(RMFTestCase):
       conf_dir = '/etc/hive/conf.server',
       configurations = self.getConfig()['configurations']['hive-site'],
     )
-    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --noproxy c6401.ambari.apache.org --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
       not_if = '[ -f DBConnectionVerification.jar]',
     )
     self.assertResourceCalled('File', '/tmp/start_metastore_script',
@@ -233,7 +233,7 @@ class TestHiveMetastore(RMFTestCase):
       conf_dir = '/etc/hive/conf.server',
       configurations = self.getConfig()['configurations']['hive-site'],
     )
-    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --noproxy c6401.ambari.apache.org --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
       not_if = '[ -f DBConnectionVerification.jar]',
     )
     self.assertResourceCalled('File', '/tmp/start_metastore_script',

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py
index f5997e4..700bb66 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_server.py
@@ -205,7 +205,7 @@ class TestHiveServer(RMFTestCase):
       conf_dir = '/etc/hive/conf.server',
       configurations = self.getConfig()['configurations']['hive-site'],
     )
-    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --noproxy c6401.ambari.apache.org --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
       not_if = '[ -f DBConnectionVerification.jar]',
     )
     self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
@@ -290,7 +290,7 @@ class TestHiveServer(RMFTestCase):
       conf_dir = '/etc/hive/conf.server',
       configurations = self.getConfig()['configurations']['hive-site'],
     )
-    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
+    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --noproxy c6401.ambari.apache.org --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
       not_if = '[ -f DBConnectionVerification.jar]',
     )
     self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/test/python/stacks/1.3.2/configs/default.hbasedecom.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/configs/default.hbasedecom.json b/ambari-server/src/test/python/stacks/1.3.2/configs/default.hbasedecom.json
index a1f62bd..9295c69 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/configs/default.hbasedecom.json
+++ b/ambari-server/src/test/python/stacks/1.3.2/configs/default.hbasedecom.json
@@ -378,6 +378,9 @@
     }, 
     "commandId": "1-1", 
     "clusterHostInfo": {
+        "ambari_server_host": [
+            "c6401.ambari.apache.org"
+        ],
         "snamenode_host": [
             "c6402.ambari.apache.org"
         ], 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/test/python/stacks/1.3.2/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/configs/default.json b/ambari-server/src/test/python/stacks/1.3.2/configs/default.json
index 40cdc57..4f1ec93 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/configs/default.json
+++ b/ambari-server/src/test/python/stacks/1.3.2/configs/default.json
@@ -426,6 +426,9 @@
     }, 
     "commandId": "1-1", 
     "clusterHostInfo": {
+        "ambari_server_host": [
+            "c6401.ambari.apache.org"
+        ],
         "snamenode_host": [
             "c6402.ambari.apache.org"
         ], 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/test/python/stacks/1.3.2/configs/default.non_gmetad_host.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/configs/default.non_gmetad_host.json b/ambari-server/src/test/python/stacks/1.3.2/configs/default.non_gmetad_host.json
index 00ddf6a..57ec6d9 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/configs/default.non_gmetad_host.json
+++ b/ambari-server/src/test/python/stacks/1.3.2/configs/default.non_gmetad_host.json
@@ -426,6 +426,9 @@
     }, 
     "commandId": "1-1", 
     "clusterHostInfo": {
+        "ambari_server_host": [
+            "c6401.ambari.apache.org"
+        ],
         "snamenode_host": [
             "c6402.ambari.apache.org"
         ], 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/test/python/stacks/1.3.2/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/configs/secured.json b/ambari-server/src/test/python/stacks/1.3.2/configs/secured.json
index 8c5f6b5..44f6506 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/1.3.2/configs/secured.json
@@ -531,6 +531,9 @@
     }, 
     "commandId": "4-2", 
     "clusterHostInfo": {
+        "ambari_server_host": [
+            "c6401.ambari.apache.org"
+        ],
         "snamenode_host": [
             "c6402.ambari.apache.org"
         ], 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f5478d1/ambari-server/src/test/python/stacks/1.3.2/configs/secured_no_jce_name.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/configs/secured_no_jce_name.json b/ambari-server/src/test/python/stacks/1.3.2/configs/secured_no_jce_name.json
new file mode 100644
index 0000000..b824d40
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.2/configs/secured_no_jce_name.json
@@ -0,0 +1,600 @@
+{
+    "roleCommand": "START", 
+    "clusterName": "cl1", 
+    "hostname": "c6402.ambari.apache.org", 
+    "hostLevelParams": {
+        "jdk_location": "http://c6401.ambari.apache.org:8080/resources/", 
+        "ambari_db_rca_password": "mapred", 
+        "ambari_db_rca_url": "jdbc:postgresql://c6401.ambari.apache.org/ambarirca", 
+        "oracle_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//ojdbc6.jar",
+        "repo_info": "[{\"baseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/1.x/updates/1.3.3.0\",\"osType\":\"centos6\",\"repoId\":\"HDP-1.3.4\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/1.x/updates/1.3.3.0\"}]", 
+        "package_list": "[{\"type\":\"rpm\",\"name\":\"hive\"},{\"type\":\"rpm\",\"name\":\"mysql-connector-java\"},{\"type\":\"rpm\",\"name\":\"mysql\"},{\"type\":\"rpm\",\"name\":\"mysql-server\"}]", 
+        "stack_version": "1.3.4", 
+        "stack_name": "HDP", 
+        "db_name": "ambari", 
+        "ambari_db_rca_driver": "org.postgresql.Driver", 
+        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
+        "ambari_db_rca_username": "mapred", 
+        "java_home": "/usr/jdk64/jdk1.7.0_45", 
+        "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar"
+    }, 
+    "commandType": "EXECUTION_COMMAND", 
+    "roleParams": {}, 
+    "serviceName": "HIVE", 
+    "role": "MYSQL_SERVER", 
+    "commandParams": {
+        "command_timeout": "600", 
+        "service_package_folder": "HIVE",
+        "script_type": "PYTHON", 
+        "schema_version": "2.0", 
+        "script": "scripts/mysql_server.py",
+        "excluded_hosts": "host1",
+        "mark_draining_only" : "false",
+        "update_exclude_file_only" : "false"
+    },
+    "taskId": 117, 
+    "public_hostname": "c6402.ambari.apache.org", 
+    "configurations": {
+        "mapred-site": {
+            "ambari.mapred.child.java.opts.memory": "768", 
+            "mapred.job.reduce.input.buffer.percent": "0.0", 
+            "mapred.job.map.memory.mb": "1536", 
+            "mapred.output.compression.type": "BLOCK", 
+            "mapred.jobtracker.maxtasks.per.job": "-1", 
+            "mapreduce.jobtracker.keytab.file": "/etc/security/keytabs/jt.service.keytab", 
+            "mapred.map.output.compression.codec": "org.apache.hadoop.io.compress.SnappyCodec", 
+            "mapred.child.root.logger": "INFO,TLA", 
+            "mapred.tasktracker.tasks.sleeptime-before-sigkill": "250", 
+            "io.sort.spill.percent": "0.9", 
+            "mapred.reduce.parallel.copies": "30", 
+            "mapred.userlog.retain.hours": "24", 
+            "mapred.reduce.tasks.speculative.execution": "false", 
+            "mapred.healthChecker.interval": "135000", 
+            "io.sort.mb": "200", 
+            "mapreduce.jobtracker.kerberos.principal": "jt/_HOST@EXAMPLE.COM", 
+            "mapred.jobtracker.blacklist.fault-timeout-window": "180", 
+            "mapreduce.cluster.administrators": " hadoop", 
+            "mapred.job.shuffle.input.buffer.percent": "0.7", 
+            "mapred.job.tracker.history.completed.location": "/mapred/history/done", 
+            "io.sort.record.percent": ".2", 
+            "mapred.cluster.max.reduce.memory.mb": "4096", 
+            "mapred.job.reuse.jvm.num.tasks": "1", 
+            "mapreduce.jobhistory.intermediate-done-dir": "/mr-history/tmp", 
+            "mapred.job.tracker.http.address": "c6402.ambari.apache.org:50030", 
+            "mapred.job.tracker.persist.jobstatus.hours": "1", 
+            "mapred.healthChecker.script.path": "/etc/hadoop/conf/health_check", 
+            "mapreduce.jobtracker.staging.root.dir": "/user", 
+            "mapred.job.shuffle.merge.percent": "0.66", 
+            "mapred.cluster.reduce.memory.mb": "2048", 
+            "mapred.job.tracker.persist.jobstatus.dir": "/mapred/jobstatus", 
+            "mapreduce.tasktracker.group": "hadoop", 
+            "mapred.tasktracker.map.tasks.maximum": "4", 
+            "mapred.child.java.opts": "-server -Xmx${ambari.mapred.child.java.opts.memory}m -Djava.net.preferIPv4Stack=true", 
+            "mapreduce.jobhistory.keytab.file": "/etc/security/keytabs/jt.service.keytab", 
+            "mapred.jobtracker.retirejob.check": "10000", 
+            "mapred.job.tracker": "c6402.ambari.apache.org:50300", 
+            "mapreduce.history.server.embedded": "false", 
+            "io.sort.factor": "100", 
+            "hadoop.job.history.user.location": "none", 
+            "mapreduce.reduce.input.limit": "10737418240", 
+            "mapred.reduce.slowstart.completed.maps": "0.05", 
+            "mapred.cluster.max.map.memory.mb": "6144", 
+            "mapreduce.tasktracker.keytab.file": "/etc/security/keytabs/tt.service.keytab", 
+            "mapred.jobtracker.taskScheduler": "org.apache.hadoop.mapred.CapacityTaskScheduler", 
+            "mapred.max.tracker.blacklists": "16", 
+            "mapreduce.tasktracker.kerberos.principal": "tt/_HOST@EXAMPLE.COM", 
+            "mapred.local.dir": "/hadoop/mapred", 
+            "mapreduce.history.server.http.address": "c6402.ambari.apache.org:51111", 
+            "mapred.jobtracker.restart.recover": "false", 
+            "mapred.jobtracker.blacklist.fault-bucket-width": "15", 
+            "mapred.jobtracker.retirejob.interval": "21600000", 
+            "tasktracker.http.threads": "50", 
+            "mapred.job.tracker.persist.jobstatus.active": "false", 
+            "mapred.system.dir": "/mapred/system", 
+            "mapred.tasktracker.reduce.tasks.maximum": "2", 
+            "mapred.cluster.map.memory.mb": "1536", 
+            "mapred.hosts.exclude": "/etc/hadoop/conf/mapred.exclude", 
+            "mapred.queue.names": "default", 
+            "mapreduce.jobhistory.webapp.address": "c6402.ambari.apache.org:19888", 
+            "mapreduce.fileoutputcommitter.marksuccessfuljobs": "false", 
+            "mapred.job.reduce.memory.mb": "2048", 
+            "mapreduce.jobhistory.done-dir": "/mr-history/done", 
+            "mapred.healthChecker.script.timeout": "60000", 
+            "jetty.connector": "org.mortbay.jetty.nio.SelectChannelConnector", 
+            "mapreduce.jobtracker.split.metainfo.maxsize": "50000000", 
+            "mapred.job.tracker.handler.count": "50", 
+            "mapred.inmem.merge.threshold": "1000", 
+            "mapred.hosts": "/etc/hadoop/conf/mapred.include", 
+            "mapred.task.tracker.task-controller": "org.apache.hadoop.mapred.LinuxTaskController", 
+            "mapred.jobtracker.completeuserjobs.maximum": "0", 
+            "mapred.task.timeout": "600000", 
+            "mapreduce.jobhistory.kerberos.principal": "jt/_HOST@EXAMPLE.COM", 
+            "mapred.map.tasks.speculative.execution": "false"
+        }, 
+        "oozie-site": {
+            "oozie.service.PurgeService.purge.interval": "3600", 
+            "oozie.service.CallableQueueService.queue.size": "1000", 
+            "oozie.service.SchemaService.wf.ext.schemas": "shell-action-0.1.xsd,email-action-0.1.xsd,hive-action-0.2.xsd,sqoop-action-0.2.xsd,ssh-action-0.1.xsd,distcp-action-0.1.xsd,hive-action-0.3.xsd",
+            "oozie.service.JPAService.jdbc.url": "jdbc:derby:${oozie.data.dir}/${oozie.db.schema.name}-db;create=true", 
+            "oozie.service.HadoopAccessorService.nameNode.whitelist": " ", 
+            "oozie.service.JPAService.jdbc.driver": "org.apache.derby.jdbc.EmbeddedDriver", 
+            "local.realm": "EXAMPLE.COM", 
+            "use.system.libpath.for.mapreduce.and.pig.jobs": "false", 
+            "oozie.service.HadoopAccessorService.kerberos.enabled": "true", 
+            "oozie.service.JPAService.create.db.schema": "false", 
+            "oozie.authentication.kerberos.name.rules": "RULE:[2:$1@$0](jt@.*EXAMPLE.COM)s/.*/mapred/\nRULE:[2:$1@$0](tt@.*EXAMPLE.COM)s/.*/mapred/\nRULE:[2:$1@$0](nn@.*EXAMPLE.COM)s/.*/hdfs/\nRULE:[2:$1@$0](dn@.*EXAMPLE.COM)s/.*/hdfs/\nRULE:[2:$1@$0](hbase@.*EXAMPLE.COM)s/.*/hbase/\nRULE:[2:$1@$0](hbase@.*EXAMPLE.COM)s/.*/hbase/\nDEFAULT", 
+            "oozie.authentication.kerberos.keytab": "/etc/security/keytabs/spnego.service.keytab", 
+            "oozie.service.ActionService.executor.ext.classes": "org.apache.oozie.action.email.EmailActionExecutor,\norg.apache.oozie.action.hadoop.HiveActionExecutor,\norg.apache.oozie.action.hadoop.ShellActionExecutor,\norg.apache.oozie.action.hadoop.SqoopActionExecutor,\norg.apache.oozie.action.hadoop.DistcpActionExecutor", 
+            "oozie.service.HadoopAccessorService.kerberos.principal": "oozie/c6402.ambari.apache.org@EXAMPLE.COM", 
+            "oozie.service.AuthorizationService.authorization.enabled": "true", 
+            "oozie.base.url": "http://c6402.ambari.apache.org:11000/oozie", 
+            "oozie.service.JPAService.jdbc.password": "q", 
+            "oozie.service.coord.normal.default.timeout": "120", 
+            "oozie.service.JPAService.pool.max.active.conn": "10", 
+            "oozie.service.PurgeService.older.than": "30", 
+            "oozie.db.schema.name": "oozie", 
+            "oozie.service.HadoopAccessorService.hadoop.configurations": "*=/etc/hadoop/conf", 
+            "oozie.service.HadoopAccessorService.jobTracker.whitelist": " ", 
+            "oozie.service.CallableQueueService.callable.concurrency": "3", 
+            "oozie.service.JPAService.jdbc.username": "oozie", 
+            "oozie.service.CallableQueueService.threads": "10", 
+            "oozie.systemmode": "NORMAL", 
+            "oozie.service.HadoopAccessorService.keytab.file": "/etc/security/keytabs/oozie.service.keytab", 
+            "oozie.service.WorkflowAppService.system.libpath": "/user/${user.name}/share/lib", 
+            "oozie.authentication.type": "kerberos", 
+            "oozie.authentication.kerberos.principal": "HTTP/c6402.ambari.apache.org@EXAMPLE.COM", 
+            "oozie.system.id": "oozie-${user.name}"
+        }, 
+        "webhcat-site": {
+            "templeton.pig.path": "pig.tar.gz/pig/bin/pig", 
+            "templeton.hive.properties": "hive.metastore.local=false,hive.metastore.uris=thrift://c6402.ambari.apache.org:9083,hive.metastore.sasl.enabled=true,hive.metastore.execute.setugi=true,hive.metastore.warehouse.dir=/apps/hive/warehouse,hive.exec.mode.local.auto=false,hive.metastore.kerberos.principal=hive/_HOST@EXAMPLE.COM", 
+            "templeton.override.enabled": "false", 
+            "templeton.jar": "/usr/lib/hcatalog/share/webhcat/svr/webhcat.jar", 
+            "templeton.kerberos.secret": "secret", 
+            "templeton.kerberos.principal": "HTTP/c6402.ambari.apache.org@EXAMPLE.COM", 
+            "templeton.zookeeper.hosts": "c6401.ambari.apache.org:2181", 
+            "templeton.exec.timeout": "60000", 
+            "templeton.storage.class": "org.apache.hcatalog.templeton.tool.ZooKeeperStorage", 
+            "templeton.hive.archive": "hdfs:///apps/webhcat/hive.tar.gz", 
+            "templeton.streaming.jar": "hdfs:///apps/webhcat/hadoop-streaming.jar", 
+            "templeton.port": "50111", 
+            "templeton.hadoop.conf.dir": "/etc/hadoop/conf", 
+            "templeton.libjars": "/usr/lib/zookeeper/zookeeper.jar", 
+            "templeton.hadoop": "/usr/bin/hadoop", 
+            "templeton.hive.path": "hive.tar.gz/hive/bin/hive", 
+            "templeton.kerberos.keytab": "/etc/security/keytabs/spnego.service.keytab", 
+            "templeton.hcat": "/usr/bin/hcat", 
+            "templeton.pig.archive": "hdfs:///apps/webhcat/pig.tar.gz"
+        }, 
+        "global": {
+            "tasktracker_task_controller": "org.apache.hadoop.mapred.LinuxTaskController", 
+            "oozie_keytab": "/etc/security/keytabs/oozie.service.keytab", 
+            "hadoop_http_principal_name": "HTTP/_HOST", 
+            "kinit_path_local": "/usr/bin", 
+            "nagios_keytab_path": "/etc/security/keytabs/nagios.service.keytab", 
+            "hbase_regionserver_heapsize": "1024m", 
+            "datanode_primary_name": "dn", 
+            "namenode_principal_name": "nn/_HOST", 
+            "namenode_keytab": "/etc/security/keytabs/nn.service.keytab", 
+            "nagios_principal_name": "nagios/c6402.ambari.apache.org@EXAMPLE.COM", 
+            "dfs_datanode_http_address": "1022", 
+            "hbase_user_keytab": "/etc/security/keytabs/hbase.headless.keytab", 
+            "jobtracker_primary_name": "jt", 
+            "hbase_pid_dir": "/var/run/hbase", 
+            "namenode_opt_maxnewsize": "200m", 
+            "syncLimit": "5", 
+            "clientPort": "2181", 
+            "oozie_jdbc_driver": "org.apache.derby.jdbc.EmbeddedDriver", 
+            "hive_metastore_primary_name": "hive", 
+            "hbase_master_keytab": "/etc/security/keytabs/hbase.service.keytab", 
+            "nagios_primary_name": "nagios", 
+            "jobtracker_principal_name": "jt/_HOST", 
+            "hive_database": "New MySQL Database", 
+            "hcat_pid_dir": "/etc/run/webhcat", 
+            "oozie_derby_database": "Derby", 
+            "snappy_enabled": "true", 
+            "oozie_pid_dir": "/var/run/oozie", 
+            "datanode_principal_name": "dn/_HOST", 
+            "hive_metastore_keytab": "/etc/security/keytabs/hive.service.keytab", 
+            "nagios_group": "nagios", 
+            "hcat_user": "hcat", 
+            "hadoop_heapsize": "1024", 
+            "hbase_regionserver_primary_name": "hbase", 
+            "zk_user": "zookeeper", 
+            "rrdcached_base_dir": "/var/lib/ganglia/rrds", 
+            "keytab_path": "/etc/security/keytabs", 
+            "hive_pid_dir": "/var/run/hive", 
+            "webhcat_server": "c6402.ambari.apache.org", 
+            "zk_data_dir": "/hadoop/zookeeper", 
+            "hcat_log_dir": "/var/log/webhcat", 
+            "oozie_hostname": "c6402.ambari.apache.org", 
+            "tasktracker_principal_name": "tt/_HOST", 
+            "jobtracker_keytab": "/etc/security/keytabs/jt.service.keytab", 
+            "tasktracker_keytab": "/etc/security/keytabs/tt.service.keytab", 
+            "zookeeper_keytab_path": "/etc/security/keytabs/zk.service.keytab", 
+            "namenode_heapsize": "1024m", 
+            "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab", 
+            "kerberos_domain": "EXAMPLE.COM", 
+            "snamenode_keytab": "/etc/security/keytabs/nn.service.keytab", 
+            "nagios_server": "c6402.ambari.apache.org", 
+            "ganglia_runtime_dir": "/var/run/ganglia/hdp", 
+            "lzo_enabled": "true", 
+            "oozie_principal_name": "oozie/c6402.ambari.apache.org", 
+            "dfs_datanode_address": "1019", 
+            "namenode_opt_newsize": "200m", 
+            "initLimit": "10", 
+            "hive_database_type": "mysql", 
+            "zk_pid_dir": "/var/run/zookeeper", 
+            "namenode_primary_name": "nn", 
+            "tickTime": "2000", 
+            "hive_metastore_principal_name": "hive/_HOST", 
+            "datanode_keytab": "/etc/security/keytabs/dn.service.keytab", 
+            "zk_log_dir": "/var/log/zookeeper", 
+            "oozie_http_principal_name": "HTTP/c6402.ambari.apache.org", 
+            "tasktracker_primary_name": "tt", 
+            "hadoop_http_keytab": "/etc/security/keytabs/spnego.service.keytab", 
+            "gmetad_user": "nobody", 
+            "oozie_http_keytab": "/etc/security/keytabs/spnego.service.keytab", 
+            "hive_metastore": "c6402.ambari.apache.org", 
+            "nagios_user": "nagios", 
+            "security_enabled": "true", 
+            "proxyuser_group": "users", 
+            "namenode_formatted_mark_dir": "/var/run/hadoop/hdfs/namenode/formatted/", 
+            "hbase_primary_name": "hbase", 
+            "oozie_http_primary_name": "HTTP", 
+            "dtnode_heapsize": "1024m", 
+            "zookeeper_principal_name": "zookeeper/_HOST@EXAMPLE.COM", 
+            "oozie_log_dir": "/var/log/oozie", 
+            "webhcat_http_keytab": "/etc/security/keytabs/spnego.service.keytab", 
+            "hdfs_user_keytab": "/etc/security/keytabs/hdfs.headless.keytab", 
+            "oozie_user": "oozie", 
+            "oozie_data_dir": "/hadoop/oozie/data", 
+            "oozie_primary_name": "oozie", 
+            "hdfs_log_dir_prefix": "/var/log/hadoop", 
+            "zookeeper_primary_name": "zookeeper", 
+            "hbase_master_principal_name": "hbase/_HOST", 
+            "jtnode_heapsize": "1024m", 
+            "yarn_user": "yarn", 
+            "gmond_user": "nobody", 
+            "nagios_web_login": "nagiosadmin", 
+            "nagios_contact": "q@q.q", 
+            "snamenode_primary_name": "nn", 
+            "hdfs_user": "hdfs", 
+            "oozie_database_type": "derby", 
+            "webhcat_user": "hcat", 
+            "hive_hostname": "c6402.ambari.apache.org", 
+            "hbase_regionserver_principal_name": "hbase/_HOST", 
+            "hive_log_dir": "/var/log/hive", 
+            "smokeuser_principal_name": "ambari-qa", 
+            "mapred_user": "mapred", 
+            "smokeuser_primary_name": "ambari-qa", 
+            "jtnode_opt_maxnewsize": "200m", 
+            "hbase_master_primary_name": "hbase", 
+            "oozie_servername": "c6402.ambari.apache.org", 
+            "hdfs_primary_name": "hdfs", 
+            "hive_ambari_database": "MySQL", 
+            "rca_enabled": "true", 
+            "hadoop_http_primary_name": "HTTP", 
+            "webHCat_http_principal_name": "HTTP/c6402.ambari.apache.org", 
+            "mysql_connector_url": "${download_url}/mysql-connector-java-5.1.18.zip", 
+            "hive_metastore_port": "9083", 
+            "hbase_user": "hbase", 
+            "snamenode_principal_name": "nn/_HOST", 
+            "oozie_database": "New Derby Database", 
+            "hbase_log_dir": "/var/log/hbase", 
+            "user_group": "hadoop", 
+            "hive_user": "hive", 
+            "webHCat_http_primary_name": "HTTP", 
+            "nagios_web_password": "!`\"' 1", 
+            "smokeuser": "ambari-qa", 
+            "ganglia_conf_dir": "/etc/ganglia/hdp", 
+            "hbase_master_heapsize": "1024m", 
+            "kerberos_install_type": "MANUALLY_SET_KERBEROS", 
+            "hadoop_pid_dir_prefix": "/var/run/hadoop", 
+            "hive_aux_jars_path": "/usr/lib/hcatalog/share/hcatalog/hcatalog-core.jar", 
+            "jtnode_opt_newsize": "200m", 
+            "hbase_regionserver_keytab": "/etc/security/keytabs/hbase.service.keytab", 
+            "hbase_principal_name": "hbase", 
+            "hdfs_principal_name": "hdfs"
+        }, 
+        "hdfs-site": {
+            "dfs.namenode.avoid.write.stale.datanode": "true", 
+            "dfs.namenode.kerberos.internal.spnego.principal": "${dfs.web.authentication.kerberos.principal}", 
+            "ipc.server.max.response.size": "5242880", 
+            "dfs.datanode.kerberos.principal": "dn/_HOST@EXAMPLE.COM", 
+            "dfs.heartbeat.interval": "3", 
+            "dfs.block.access.token.enable": "true", 
+            "dfs.support.append": "true", 
+            "dfs.cluster.administrators": " hdfs", 
+            "ambari.dfs.datanode.http.port": "1022", 
+            "dfs.block.size": "134217728", 
+            "dfs.blockreport.initialDelay": "120", 
+            "dfs.namenode.kerberos.principal": "nn/_HOST@EXAMPLE.COM", 
+            "dfs.hosts": "/etc/hadoop/conf/dfs.include", 
+            "dfs.datanode.du.reserved": "1073741824", 
+            "dfs.replication": "3", 
+            "dfs.namenode.handler.count": "100", 
+            "dfs.web.authentication.kerberos.keytab": "/etc/security/keytabs/spnego.service.keytab", 
+            "dfs.namenode.stale.datanode.interval": "30000", 
+            "dfs.datanode.socket.write.timeout": "0", 
+            "ipc.server.read.threadpool.size": "5", 
+            "dfs.balance.bandwidthPerSec": "6250000", 
+            "dfs.datanode.address": "0.0.0.0:${ambari.dfs.datanode.port}", 
+            "dfs.webhdfs.enabled": "true", 
+            "dfs.datanode.failed.volumes.tolerated": "0", 
+            "dfs.permissions.supergroup": "hdfs", 
+            "dfs.secondary.http.address": "c6402.ambari.apache.org:50090", 
+            "ambari.dfs.datanode.port": "1019", 
+            "dfs.namenode.write.stale.datanode.ratio": "1.0f", 
+            "dfs.name.dir": "/hadoop/hdfs/namenode", 
+            "dfs.access.time.precision": "0", 
+            "dfs.secondary.namenode.kerberos.internal.spnego.principal": "${dfs.web.authentication.kerberos.principal}", 
+            "dfs.https.address": "c6401.ambari.apache.org:50470", 
+            "dfs.datanode.http.address": "0.0.0.0:${ambari.dfs.datanode.http.port}", 
+            "dfs.data.dir": "/hadoop/hdfs/data", 
+            "dfs.secondary.https.port": "50490", 
+            "dfs.permissions": "true", 
+            "dfs.secondary.namenode.keytab.file": "/etc/security/keytabs/nn.service.keytab", 
+            "dfs.web.authentication.kerberos.principal": "HTTP/_HOST@EXAMPLE.COM", 
+            "dfs.block.local-path-access.user": "hbase", 
+            "dfs.datanode.ipc.address": "0.0.0.0:8010", 
+            "dfs.web.ugi": "gopher,gopher", 
+            "dfs.datanode.du.pct": "0.85f", 
+            "dfs.secondary.namenode.kerberos.principal": "nn/_HOST@EXAMPLE.COM", 
+            "dfs.datanode.keytab.file": "/etc/security/keytabs/dn.service.keytab", 
+            "dfs.http.address": "c6401.ambari.apache.org:50070", 
+            "dfs.namenode.keytab.file": "/etc/security/keytabs/nn.service.keytab", 
+            "dfs.https.port": "50070", 
+            "dfs.replication.max": "50", 
+            "dfs.datanode.max.xcievers": "4096", 
+            "dfs.namenode.avoid.read.stale.datanode": "true", 
+            "dfs.hosts.exclude": "/etc/hadoop/conf/dfs.exclude", 
+            "dfs.datanode.data.dir.perm": "750", 
+            "dfs.safemode.threshold.pct": "1.0f", 
+            "dfs.umaskmode": "077"
+        }, 
+        "hbase-site": {
+            "hbase.client.keyvalue.maxsize": "10485760", 
+            "hbase.regionserver.keytab.file": "/etc/security/keytabs/hbase.service.keytab", 
+            "hbase.hstore.compactionThreshold": "3", 
+            "hbase.zookeeper.property.clientPort": "2181", 
+            "hbase.rootdir": "hdfs://c6401.ambari.apache.org:8020/apps/hbase/data", 
+            "hbase.regionserver.handler.count": "60", 
+            "dfs.client.read.shortcircuit": "true", 
+            "hbase.bulkload.staging.dir": "/apps/hbase/staging", 
+            "hbase.regionserver.global.memstore.lowerLimit": "0.38", 
+            "hbase.master.kerberos.principal": "hbase/_HOST@EXAMPLE.COM", 
+            "hbase.hregion.memstore.block.multiplier": "2", 
+            "hbase.hregion.memstore.flush.size": "134217728", 
+            "hbase.superuser": "hbase", 
+            "hbase.coprocessor.region.classes": "org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,org.apache.hadoop.hbase.security.access.AccessController", 
+            "hbase.rpc.engine": "org.apache.hadoop.hbase.ipc.SecureRpcEngine", 
+            "hbase.hregion.max.filesize": "10737418240", 
+            "hbase.regionserver.global.memstore.upperLimit": "0.4", 
+            "zookeeper.session.timeout": "60000", 
+            "hbase.tmp.dir": "/hadoop/hbase", 
+            "hbase.regionserver.kerberos.principal": "hbase/_HOST@EXAMPLE.COM", 
+            "hfile.block.cache.size": "0.40", 
+            "hbase.security.authentication": "kerberos", 
+            "hbase.zookeeper.quorum": "c6401.ambari.apache.org", 
+            "zookeeper.znode.parent": "/hbase-secure", 
+            "hbase.coprocessor.master.classes": "org.apache.hadoop.hbase.security.access.AccessController", 
+            "hbase.hstore.blockingStoreFiles": "10", 
+            "hbase.hregion.majorcompaction": "86400000", 
+            "hbase.security.authorization": "true", 
+            "hbase.master.keytab.file": "/etc/security/keytabs/hbase.service.keytab", 
+            "hbase.cluster.distributed": "true", 
+            "hbase.hregion.memstore.mslab.enabled": "true", 
+            "hbase.client.scanner.caching": "100", 
+            "hbase.zookeeper.useMulti": "true"
+        }, 
+        "core-site": {
+            "fs.default.name": "hdfs://c6401.ambari.apache.org:8020", 
+            "hadoop.proxyuser.HTTP.groups": "users", 
+            "hadoop.proxyuser.HTTP.hosts": "c6402.ambari.apache.org", 
+            "hadoop.proxyuser.hcat.hosts": "c6402.ambari.apache.org", 
+            "fs.checkpoint.period": "21600", 
+            "hadoop.proxyuser.hcat.groups": "users", 
+            "fs.checkpoint.size": "67108864", 
+            "fs.trash.interval": "360", 
+            "hadoop.proxyuser.hive.groups": "users", 
+            "io.compression.codecs": "org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,com.hadoop.compression.lzo.LzoCodec,com.hadoop.compression.lzo.LzopCodec,org.apache.hadoop.io.compress.SnappyCodec", 
+            "hadoop.security.authentication": "kerberos", 
+            "fs.checkpoint.edits.dir": "${fs.checkpoint.dir}", 
+            "ipc.client.idlethreshold": "8000", 
+            "io.file.buffer.size": "131072", 
+            "io.compression.codec.lzo.class": "com.hadoop.compression.lzo.LzoCodec", 
+            "io.serializations": "org.apache.hadoop.io.serializer.WritableSerialization", 
+            "webinterface.private.actions": "false", 
+            "hadoop.proxyuser.hive.hosts": "c6402.ambari.apache.org", 
+            "hadoop.proxyuser.oozie.groups": "users", 
+            "hadoop.security.authorization": "true", 
+            "fs.checkpoint.dir": "/hadoop/hdfs/namesecondary", 
+            "ipc.client.connect.max.retries": "50", 
+            "hadoop.security.auth_to_local": "RULE:[2:$1@$0](jt@.*EXAMPLE.COM)s/.*/mapred/\nRULE:[2:$1@$0](tt@.*EXAMPLE.COM)s/.*/mapred/\nRULE:[2:$1@$0](nn@.*EXAMPLE.COM)s/.*/hdfs/\nRULE:[2:$1@$0](dn@.*EXAMPLE.COM)s/.*/hdfs/\nRULE:[2:$1@$0](hbase@.*EXAMPLE.COM)s/.*/hbase/\nRULE:[2:$1@$0](hbase@.*EXAMPLE.COM)s/.*/hbase/\nRULE:[2:$1@$0](oozie@.*EXAMPLE.COM)s/.*/oozie/\nDEFAULT", 
+            "hadoop.proxyuser.oozie.hosts": "c6402.ambari.apache.org", 
+            "ipc.client.connection.maxidletime": "30000"
+        }, 
+        "hive-site": {
+            "hive.enforce.sorting": "true", 
+            "javax.jdo.option.ConnectionPassword": "!`\"' 1", 
+            "javax.jdo.option.ConnectionDriverName": "com.mysql.jdbc.Driver", 
+            "hive.optimize.bucketmapjoin.sortedmerge": "true", 
+            "fs.file.impl.disable.cache": "true", 
+            "hive.auto.convert.join.noconditionaltask": "true", 
+            "hive.server2.authentication.kerberos.principal": "hive/_HOST@EXAMPLE.COM", 
+            "hive.optimize.bucketmapjoin": "true", 
+            "hive.map.aggr": "true", 
+            "hive.security.authorization.enabled": "true", 
+            "hive.optimize.reducededuplication.min.reducer": "1", 
+            "hive.metastore.kerberos.keytab.file": "/etc/security/keytabs/hive.service.keytab", 
+            "hive.metastore.uris": "thrift://c6402.ambari.apache.org:9083", 
+            "hive.mapjoin.bucket.cache.size": "10000", 
+            "hive.auto.convert.join.noconditionaltask.size": "1000000000", 
+            "javax.jdo.option.ConnectionUserName": "hive", 
+            "hive.metastore.cache.pinobjtypes": "Table,Database,Type,FieldSchema,Order", 
+            "hive.server2.authentication": "KERBEROS", 
+            "hive.metastore.sasl.enabled": "true", 
+            "hive.metastore.warehouse.dir": "/apps/hive/warehouse", 
+            "hive.metastore.client.socket.timeout": "60", 
+            "hive.metastore.kerberos.principal": "hive/_HOST@EXAMPLE.COM", 
+            "hive.semantic.analyzer.factory.impl": "org.apache.hivealog.cli.HCatSemanticAnalyzerFactory", 
+            "hive.auto.convert.join": "true", 
+            "hive.enforce.bucketing": "true", 
+            "hive.mapred.reduce.tasks.speculative.execution": "false", 
+            "javax.jdo.option.ConnectionURL": "jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true", 
+            "hive.auto.convert.sortmerge.join": "true", 
+            "fs.hdfs.impl.disable.cache": "true", 
+            "hive.security.authorization.manager": "org.apache.hcatalog.security.HdfsAuthorizationProvider", 
+            "ambari.hive.db.schema.name": "hive", 
+            "hive.metastore.execute.setugi": "true", 
+            "hive.auto.convert.sortmerge.join.noconditionaltask": "true", 
+            "hive.server2.enable.doAs": "true", 
+            "hive.optimize.mapjoin.mapreduce": "true", 
+            "hive.server2.authentication.kerberos.keytab": "/etc/security/keytabs/hive.service.keytab"
+        },
+        "hdfs-log4j": {
+            "content": "log4jproperties\nline2"
+        },
+        "yarn-log4j": {
+            "content": "log4jproperties\nline2"
+        },
+        "hbase-log4j": {
+            "content": "log4jproperties\nline2"
+        },
+        "hive-log4j": {
+            "content": "log4jproperties\nline2"
+        },
+        "hive-exec-log4j": {
+            "content": "log4jproperties\nline2"
+        },
+        "zookeeper-log4j": {
+            "content": "log4jproperties\nline2"
+        },
+        "pig-log4j": {
+            "content": "log4jproperties\nline2"
+        },
+        "oozie-log4j": {
+            "content": "log4jproperties\nline2"
+        }
+    }, 
+    "configurationTags": {
+        "mapred-site": {
+            "tag": "version1389980437965"
+        }, 
+        "oozie-site": {
+            "tag": "version1389980437966"
+        }, 
+        "webhcat-site": {
+            "tag": "version1389980437965"
+        }, 
+        "global": {
+            "tag": "version1389980437965"
+        }, 
+        "hdfs-site": {
+            "tag": "version1389980437965"
+        }, 
+        "hbase-site": {
+            "tag": "version1389980437965"
+        }, 
+        "core-site": {
+            "tag": "version1389980437965"
+        }, 
+        "hive-site": {
+            "tag": "version1389980437965"
+        },
+        "hdfs-log4j": {
+            "tag": "version1389980437965"
+        },
+        "yarn-log4j": {
+            "tag": "version1389980437965"
+        },
+        "hbase-log4j": {
+            "tag": "version1389980437965"
+        },
+        "hive-log4j": {
+            "tag": "version1389980437965"
+        },
+        "hive-exec-log4j": {
+            "tag": "version1389980437965"
+        },
+        "zookeeper-log4j": {
+            "tag": "version1389980437965"
+        },
+        "oozie-log4j": {
+            "tag": "version1389980437965"
+        },
+        "pig-log4j": {
+            "tag": "version1389980437965"
+        }
+    }, 
+    "commandId": "4-2", 
+    "clusterHostInfo": {
+        "ambari_server_host": [
+            "c6401.ambari.apache.org"
+        ],
+        "snamenode_host": [
+            "c6402.ambari.apache.org"
+        ], 
+        "ganglia_monitor_hosts": [
+            "c6401.ambari.apache.org", 
+            "c6402.ambari.apache.org"
+        ], 
+        "nagios_server_host": [
+            "c6402.ambari.apache.org"
+        ], 
+        "hive_metastore_hosts": [
+            "c6402.ambari.apache.org"
+        ], 
+        "all_ping_ports": [
+            "8670", 
+            "8670"
+        ], 
+        "mapred_tt_hosts": [
+            "c6401.ambari.apache.org", 
+            "c6402.ambari.apache.org"
+        ], 
+        "all_hosts": [
+            "c6401.ambari.apache.org", 
+            "c6402.ambari.apache.org"
+        ], 
+        "hbase_rs_hosts": [
+            "c6401.ambari.apache.org", 
+            "c6402.ambari.apache.org"
+        ], 
+        "slave_hosts": [
+            "c6401.ambari.apache.org", 
+            "c6402.ambari.apache.org"
+        ], 
+        "namenode_host": [
+            "c6401.ambari.apache.org"
+        ], 
+        "ganglia_server_host": [
+            "c6402.ambari.apache.org"
+        ], 
+        "hbase_master_hosts": [
+            "c6401.ambari.apache.org"
+        ], 
+        "hive_mysql_host": [
+            "c6402.ambari.apache.org"
+        ], 
+        "oozie_server": [
+            "c6402.ambari.apache.org"
+        ], 
+        "webhcat_server_host": [
+            "c6402.ambari.apache.org"
+        ], 
+        "jtnode_host": [
+            "c6402.ambari.apache.org"
+        ], 
+        "zookeeper_hosts": [
+            "c6402.ambari.apache.org"
+        ], 
+        "hs_host": [
+            "c6402.ambari.apache.org"
+        ], 
+        "hive_server_host": [
+            "c6402.ambari.apache.org"
+        ]
+    }
+}
\ No newline at end of file
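
The new secured_no_jce_name.json mirrors secured.json but deliberately omits jce_name from hostLevelParams while keeping jdk_name, giving the Python stack tests a secured command in which the JCE archive is unknown. A quick check of the fixture's shape (plain json loading, not the RMFTestCase harness the stack tests actually use):

    # Sketch: confirm the fixture omits jce_name while keeping jdk_name.
    import json

    with open('secured_no_jce_name.json') as f:
        cmd = json.load(f)

    params = cmd['hostLevelParams']
    assert 'jdk_name' in params            # JDK archive is still named
    assert 'jce_name' not in params        # the condition the new tests exercise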