Posted to commits@ambari.apache.org by jo...@apache.org on 2014/09/16 14:51:00 UTC

[20/27] git commit: AMBARI-7296. HCatalog and WebHCat services should not be managed as separate services (they should be part of the Hive service) (jaimin)

AMBARI-7296. HCatalog and WebHCat services should not be managed as separate services (they should be part of the Hive service) (jaimin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/601014ed
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/601014ed
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/601014ed

Branch: refs/heads/branch-alerts-dev
Commit: 601014ed8b047c676ece41016a3278c19703794f
Parents: 610bb1e
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Mon Sep 15 12:12:17 2014 -0700
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Mon Sep 15 12:12:46 2014 -0700

----------------------------------------------------------------------
 .../ambari_agent/TestActualConfigHandler.py     |   6 +-
 .../test/python/ambari_agent/TestLiveStatus.py  |   4 +-
 ambari-server/docs/api/v1/services.md           |   7 -
 .../internal/BaseBlueprintProcessor.java        |   2 +-
 .../ambari/server/metadata/ActionMetadata.java  |   1 -
 .../org/apache/ambari/server/state/Service.java |   8 +-
 .../server/upgrade/UpgradeCatalog170.java       | 190 +++++++++----
 .../custom_actions/validate_configs.py          |   2 -
 .../stacks/HDP/1.3.2/role_command_order.json    |   4 +-
 .../services/HIVE/configuration/webhcat-env.xml |  54 ++++
 .../HIVE/configuration/webhcat-site.xml         | 156 +++++++++++
 .../stacks/HDP/1.3.2/services/HIVE/metainfo.xml | 154 ++++++-----
 .../HIVE/package/files/templetonSmoke.sh        |  96 +++++++
 .../services/HIVE/package/scripts/params.py     |  31 ++-
 .../HIVE/package/scripts/service_check.py       |   2 +
 .../HIVE/package/scripts/status_params.py       |   3 +
 .../services/HIVE/package/scripts/webhcat.py    | 107 ++++++++
 .../HIVE/package/scripts/webhcat_server.py      |  53 ++++
 .../HIVE/package/scripts/webhcat_service.py     |  40 +++
 .../package/scripts/webhcat_service_check.py    |  42 +++
 .../HDP/1.3.2/services/NAGIOS/metainfo.xml      |   2 +-
 .../templates/hadoop-servicegroups.cfg.j2       |   6 -
 .../package/templates/hadoop-services.cfg.j2    |   2 +-
 .../WEBHCAT/configuration/webhcat-env.xml       |  54 ----
 .../WEBHCAT/configuration/webhcat-site.xml      | 156 -----------
 .../HDP/1.3.2/services/WEBHCAT/metainfo.xml     | 103 -------
 .../WEBHCAT/package/files/templetonSmoke.sh     |  96 -------
 .../WEBHCAT/package/scripts/__init__.py         |  20 --
 .../services/WEBHCAT/package/scripts/params.py  |  78 ------
 .../WEBHCAT/package/scripts/service_check.py    |  44 ---
 .../WEBHCAT/package/scripts/status_params.py    |  26 --
 .../services/WEBHCAT/package/scripts/webhcat.py | 107 --------
 .../WEBHCAT/package/scripts/webhcat_server.py   |  53 ----
 .../WEBHCAT/package/scripts/webhcat_service.py  |  40 ---
 .../stacks/HDP/1.3/role_command_order.json      |   4 +-
 .../stacks/HDP/1.3/services/HIVE/metainfo.xml   |   5 -
 .../stacks/HDP/2.0.6/role_command_order.json    |   4 +-
 .../services/HIVE/configuration/webhcat-env.xml |  54 ++++
 .../HIVE/configuration/webhcat-site.xml         | 138 ++++++++++
 .../stacks/HDP/2.0.6/services/HIVE/metainfo.xml | 165 ++++++-----
 .../HIVE/package/files/templetonSmoke.sh        |  96 +++++++
 .../services/HIVE/package/scripts/params.py     |  44 ++-
 .../HIVE/package/scripts/service_check.py       |   2 +
 .../HIVE/package/scripts/status_params.py       |   1 +
 .../services/HIVE/package/scripts/webhcat.py    | 112 ++++++++
 .../HIVE/package/scripts/webhcat_server.py      |  53 ++++
 .../HIVE/package/scripts/webhcat_service.py     |  40 +++
 .../package/scripts/webhcat_service_check.py    |  41 +++
 .../HDP/2.0.6/services/NAGIOS/metainfo.xml      |   2 +-
 .../templates/hadoop-servicegroups.cfg.j2       |   8 +-
 .../package/templates/hadoop-services.cfg.j2    |   2 +-
 .../WEBHCAT/configuration/webhcat-env.xml       |  54 ----
 .../WEBHCAT/configuration/webhcat-site.xml      | 138 ----------
 .../HDP/2.0.6/services/WEBHCAT/metainfo.xml     | 110 --------
 .../WEBHCAT/package/files/templetonSmoke.sh     |  96 -------
 .../WEBHCAT/package/scripts/__init__.py         |  20 --
 .../services/WEBHCAT/package/scripts/params.py  | 102 -------
 .../WEBHCAT/package/scripts/service_check.py    |  45 ---
 .../WEBHCAT/package/scripts/status_params.py    |  26 --
 .../services/WEBHCAT/package/scripts/webhcat.py | 112 --------
 .../WEBHCAT/package/scripts/webhcat_server.py   |  53 ----
 .../WEBHCAT/package/scripts/webhcat_service.py  |  40 ---
 .../HDP/2.1.GlusterFS/role_command_order.json   |   4 +-
 .../2.1.GlusterFS/services/HIVE/metainfo.xml    | 100 ++++---
 .../WEBHCAT/configuration/webhcat-site.xml      | 143 ----------
 .../2.1.GlusterFS/services/WEBHCAT/metainfo.xml |  46 ----
 .../stacks/HDP/2.1/role_command_order.json      |   3 +-
 .../stacks/HDP/2.1/services/HIVE/metainfo.xml   |  62 ++++-
 .../WEBHCAT/configuration/webhcat-site.xml      | 143 ----------
 .../HDP/2.1/services/WEBHCAT/metainfo.xml       |  47 ----
 .../stacks/HDP/2.2.1/services/HIVE/metainfo.xml |   4 -
 .../HDP/2.2.1/services/WEBHCAT/metainfo.xml     |  26 --
 .../HIVE/configuration/webhcat-site.xml         |  59 ++++
 .../stacks/HDP/2.2/services/HIVE/metainfo.xml   |  29 +-
 .../WEBHCAT/configuration/webhcat-site.xml      |  59 ----
 .../api/util/StackExtensionHelperTest.java      |  29 +-
 .../AmbariManagementControllerTest.java         | 123 ++-------
 .../internal/BaseBlueprintProcessorTest.java    |  27 +-
 .../server/upgrade/UpgradeCatalog170Test.java   | 241 +++++++++++-----
 .../1.3.2/HIVE/test_hive_service_check.py       |  20 ++
 .../stacks/1.3.2/HIVE/test_webhcat_server.py    | 258 ++++++++++++++++++
 .../stacks/1.3.2/WEBHCAT/test_webhcat_server.py | 258 ------------------
 .../1.3.2/WEBHCAT/test_webhcat_service_check.py |  61 -----
 .../2.0.6/HIVE/test_hive_service_check.py       |  21 ++
 .../stacks/2.0.6/HIVE/test_webhcat_server.py    | 270 ++++++++++++++++++
 .../stacks/2.0.6/WEBHCAT/test_webhcat_server.py | 273 -------------------
 .../2.0.6/WEBHCAT/test_webhcat_service_check.py |  61 -----
 .../stacks/HDP/2.0.5/services/HIVE/metainfo.xml |  74 +++++
 .../WEBHCAT/configuration/webhcat-site.xml      | 126 ---------
 .../HDP/2.0.5/services/WEBHCAT/metainfo.xml     | 102 -------
 .../HDP/2.0.6/services/WEBHCAT/metainfo.xml     |  28 --
 ambari-web/app/assets/data/alerts/alerts.json   |   2 +-
 .../app/assets/data/dashboard/services.json     | 117 --------
 .../app/assets/data/hosts/HDP2/hosts.json       |  11 +-
 .../data/services/HDP2/components_state.json    |   9 -
 .../app/assets/data/services/HDP2/services.json |  16 --
 .../services/host_component_actual_configs.json | 118 +-------
 .../data/stacks/HDP-2.1/recommendations.json    |   2 +-
 .../stacks/HDP-2.1/recommendations_configs.json |   2 +-
 .../data/stacks/HDP-2.1/service_components.json | 218 ++++++---------
 .../data/wizard/stack/hdp/version/1.2.0.json    |  18 --
 .../data/wizard/stack/hdp/version/1.2.1.json    |  11 -
 .../data/wizard/stack/hdp/version/1.3.0.json    |  11 -
 .../data/wizard/stack/hdp/version/1.3.1.json    |  11 -
 .../data/wizard/stack/hdp/version/2.0.1.json    |  11 -
 .../data/wizard/stack/hdp/version/2.0.5.json    |  11 -
 .../data/wizard/stack/hdp/version0.1.json       |  17 --
 .../wizard/stack/hdp/version01/HCATALOG.json    |  20 --
 .../wizard/stack/hdp/version1.2.1/HCATALOG.json |   4 -
 .../wizard/stack/hdp/version1.3.0/HCATALOG.json |   4 -
 .../wizard/stack/hdp/version131/HCATALOG.json   |   4 -
 .../wizard/stack/hdp/version2.0.1/HCATALOG.json |   4 -
 .../app/assets/data/wizard/stack/stacks.json    |  55 ----
 .../main/admin/security/add/step2.js            |   4 +-
 .../main/admin/serviceAccounts_controller.js    |   2 +-
 .../controllers/main/service/info/configs.js    |   8 +-
 ambari-web/app/controllers/main/service/item.js |   2 +-
 .../app/controllers/wizard/step7_controller.js  |   3 +-
 .../app/controllers/wizard/step8_controller.js  |   3 +-
 ambari-web/app/data/HDP2/secure_configs.js      |  15 +-
 ambari-web/app/data/HDP2/secure_mapping.js      |  12 +-
 ambari-web/app/data/HDP2/secure_properties.js   |   8 +-
 ambari-web/app/data/HDP2/site_properties.js     |  22 +-
 ambari-web/app/data/secure_configs.js           |  15 +-
 ambari-web/app/data/secure_mapping.js           |  12 +-
 ambari-web/app/data/secure_properties.js        |   8 +-
 ambari-web/app/data/site_properties.js          |  22 +-
 .../app/mixins/wizard/addSecurityConfigs.js     |   2 +-
 ambari-web/app/models/service.js                |  12 -
 ambari-web/app/models/stack_service.js          |  15 +-
 .../app/utils/batch_scheduled_requests.js       |   1 -
 ambari-web/app/utils/config.js                  |   2 +-
 ambari-web/app/utils/helper.js                  |   1 -
 .../app/views/main/host/configs_service_menu.js |   2 +-
 .../app/views/main/service/info/summary.js      |   2 +-
 ambari-web/app/views/main/service/item.js       |   2 +-
 ambari-web/app/views/main/service/menu.js       |   8 +-
 .../security/add/addSecurity_controller_test.js |   2 -
 .../main/service/info/config_test.js            |   8 +-
 .../test/controllers/wizard/step4_test.js       |  34 +--
 .../test/controllers/wizard/step8_test.js       |  14 +-
 ambari-web/test/models/service_test.js          |   8 -
 ambari-web/test/models/stack_service_test.js    |   8 -
 ambari-web/test/service_components.js           | 229 ++++++----------
 ambari-web/test/utils/helper_test.js            |   1 -
 .../src/addOns/nagios/scripts/nagios_alerts.php |   8 +-
 .../apache/ambari/msi/ClusterDefinition.java    |   2 +-
 .../ambari/msi/ClusterDefinitionTest.java       |   4 -
 .../Scripts/HostComponentsDiscovery.ps1         |   3 +-
 .../assets/data/resource/service_status.json    |  12 -
 150 files changed, 2786 insertions(+), 4419 deletions(-)
----------------------------------------------------------------------
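
For orientation: after this patch, HCATALOG and WEBHCAT disappear from the service collection and WEBHCAT_SERVER surfaces as a component of HIVE. A minimal sketch of verifying that through the v1 REST API, with endpoint shapes taken from the services.md excerpt below (the host, credentials, the requests dependency, and the ServiceComponentInfo payload key are assumptions):

    import requests  # assumed available; any HTTP client works

    base = "http://your.ambari.server/api/v1/clusters/c1"
    auth = ("admin", "admin")  # placeholder credentials

    services = requests.get(base + "/services", auth=auth).json()
    names = [s["ServiceInfo"]["service_name"] for s in services["items"]]
    assert "HCATALOG" not in names and "WEBHCAT" not in names

    hive = requests.get(base + "/services/HIVE/components", auth=auth).json()
    comps = [c["ServiceComponentInfo"]["component_name"] for c in hive["items"]]
    assert "WEBHCAT_SERVER" in comps and "HCAT" in comps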


http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py b/ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py
index ca56350..07d0171 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py
@@ -34,8 +34,8 @@ class TestActualConfigHandler(TestCase):
   def setUp(self):
     LiveStatus.SERVICES = [
       "HDFS", "MAPREDUCE", "GANGLIA", "HBASE",
-      "NAGIOS", "ZOOKEEPER", "OOZIE", "HCATALOG",
-      "KERBEROS", "TEMPLETON", "HIVE", "WEBHCAT",
+      "NAGIOS", "ZOOKEEPER", "OOZIE",
+      "KERBEROS", "TEMPLETON", "HIVE",
       "YARN", "MAPREDUCE2", "FLUME", "TEZ",
       "FALCON", "STORM"
     ]
@@ -108,7 +108,7 @@ class TestActualConfigHandler(TestCase):
        "componentName" : "HIVE_METASTORE"},
       {"serviceName" : "HIVE",
        "componentName" : "MYSQL_SERVER"},
-      {"serviceName" : "WEBHCAT",
+      {"serviceName" : "HIVE",
        "componentName" : "WEBHCAT_SERVER"},
       {"serviceName" : "YARN",
        "componentName" : "RESOURCEMANAGER"},

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py b/ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py
index 6937da2..9dfb47a 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py
@@ -36,8 +36,8 @@ class TestLiveStatus(TestCase):
     sys.stdout = out
     LiveStatus.SERVICES = [
       "HDFS", "MAPREDUCE", "GANGLIA", "HBASE",
-      "NAGIOS", "ZOOKEEPER", "OOZIE", "HCATALOG",
-      "KERBEROS", "TEMPLETON", "HIVE", "WEBHCAT",
+      "NAGIOS", "ZOOKEEPER", "OOZIE",
+      "KERBEROS", "TEMPLETON", "HIVE",
       "YARN", "MAPREDUCE2", "FLUME", "TEZ",
       "FALCON", "STORM"
     ]
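
Both agent test changes encode the same invariant: WEBHCAT_SERVER now reports under serviceName HIVE, and HCATALOG/WEBHCAT are gone from LiveStatus.SERVICES. A toy lookup capturing the post-patch mapping (illustrative only, not the agent's actual data structure):

    COMPONENT_TO_SERVICE = {
        "HIVE_SERVER": "HIVE",
        "HIVE_METASTORE": "HIVE",
        "MYSQL_SERVER": "HIVE",
        "WEBHCAT_SERVER": "HIVE",   # previously "WEBHCAT"
        "HCAT": "HIVE",             # previously "HCATALOG"
    }

    def service_for(component):
        return COMPONENT_TO_SERVICE.get(component)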

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/docs/api/v1/services.md
----------------------------------------------------------------------
diff --git a/ambari-server/docs/api/v1/services.md b/ambari-server/docs/api/v1/services.md
index 8a9b68d..de5c203 100644
--- a/ambari-server/docs/api/v1/services.md
+++ b/ambari-server/docs/api/v1/services.md
@@ -78,13 +78,6 @@ Get the collection of the services for the cluster named "c1".
           		}
         	},
         	{
-        		"href" : "http://your.ambari.server/api/v1/clusters/c1/services/HCATALOG",
-        		"ServiceInfo" : {
-        	  		"cluster_name" : "c1",
-        	  		"service_name" : "HCATALOG"
-        	  	}
-        	},
-        	{
         		"href" : "http://your.ambari.server/api/v1/clusters/c1/services/PIG",
         		"ServiceInfo" : {
         	  		"cluster_name" : "c1",

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java
index 5a99af8..abd22a4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java
@@ -851,7 +851,7 @@ public abstract class BaseBlueprintProcessor extends AbstractControllerResourceP
       Collection<DependencyInfo> nagiosDependencies = getDependenciesForComponent("NAGIOS_SERVER");
       for (DependencyInfo dependency : nagiosDependencies) {
         if (dependency.getComponentName().equals("HCAT")) {
-          dependencyConditionalServiceMap.put(dependency, "HCATALOG");
+          dependencyConditionalServiceMap.put(dependency, "HIVE");
         } else if (dependency.getComponentName().equals("OOZIE_CLIENT")) {
           dependencyConditionalServiceMap.put(dependency, "OOZIE");
         } else if (dependency.getComponentName().equals("YARN_CLIENT")) {
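
The conditional-service map now resolves a Nagios HCAT dependency to HIVE rather than the retired HCATALOG service. The shape of that mapping, sketched as plain data (the Java above builds it per DependencyInfo object; the YARN entry is implied by the trailing else-if branch, which the hunk truncates):

    NAGIOS_DEPENDENCY_SERVICE = {
        "HCAT": "HIVE",           # was "HCATALOG" before this patch
        "OOZIE_CLIENT": "OOZIE",
        "YARN_CLIENT": "YARN",    # assumed from the truncated branch
    }

    service = NAGIOS_DEPENDENCY_SERVICE.get("HCAT")  # -> "HIVE"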

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java b/ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java
index ae13bf3..8a9799d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java
@@ -42,7 +42,6 @@ public class ActionMetadata {
       Map<String, String> serviceChecks = new HashMap<String, String>();
       
       serviceChecks.put(Service.Type.ZOOKEEPER.toString(), "ZOOKEEPER_QUORUM_SERVICE_CHECK");
-      serviceChecks.put(Service.Type.HCATALOG.toString(), "HCAT_SERVICE_CHECK");
       
       SERVICE_CHECKS = Collections.unmodifiableMap(serviceChecks);
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/java/org/apache/ambari/server/state/Service.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/Service.java b/ambari-server/src/main/java/org/apache/ambari/server/state/Service.java
index 48fd77e..63d8760 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/Service.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/Service.java
@@ -18,14 +18,13 @@
 
 package org.apache.ambari.server.state;
 
-import java.util.Map;
-import java.util.concurrent.locks.ReadWriteLock;
-
 import com.google.inject.persist.Transactional;
-
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.controller.ServiceResponse;
 
+import java.util.Map;
+import java.util.concurrent.locks.ReadWriteLock;
+
 public interface Service {
 
   public String getName();
@@ -113,7 +112,6 @@ public interface Service {
     GANGLIA,
     ZOOKEEPER,
     PIG,
-    HCATALOG,
     FLUME,
     YARN,
     MAPREDUCE2

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
index 4a9b83f..75635cc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
@@ -18,74 +18,30 @@
 
 package org.apache.ambari.server.upgrade;
 
-import java.lang.reflect.Type;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.Date;
-
-import javax.persistence.EntityManager;
-import javax.persistence.TypedQuery;
-import javax.persistence.criteria.CriteriaBuilder;
-import javax.persistence.criteria.CriteriaQuery;
-import javax.persistence.criteria.Expression;
-import javax.persistence.criteria.Predicate;
-import javax.persistence.criteria.Root;
-
 import com.google.common.reflect.TypeToken;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
-import org.apache.ambari.server.orm.dao.ClusterDAO;
-import org.apache.ambari.server.orm.dao.DaoUtils;
-import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
-import org.apache.ambari.server.orm.dao.KeyValueDAO;
-import org.apache.ambari.server.orm.dao.PermissionDAO;
-import org.apache.ambari.server.orm.dao.PrincipalDAO;
-import org.apache.ambari.server.orm.dao.PrincipalTypeDAO;
-import org.apache.ambari.server.orm.dao.PrivilegeDAO;
-import org.apache.ambari.server.orm.dao.ResourceDAO;
-import org.apache.ambari.server.orm.dao.ResourceTypeDAO;
-import org.apache.ambari.server.orm.dao.ConfigGroupConfigMappingDAO;
-import org.apache.ambari.server.orm.dao.UserDAO;
-import org.apache.ambari.server.orm.dao.ViewDAO;
-import org.apache.ambari.server.orm.dao.ViewInstanceDAO;
-import org.apache.ambari.server.orm.entities.ClusterEntity;
-import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
-import org.apache.ambari.server.orm.entities.ConfigGroupConfigMappingEntity;
-import org.apache.ambari.server.orm.entities.ClusterConfigEntity;
-import org.apache.ambari.server.orm.entities.HostRoleCommandEntity_;
-import org.apache.ambari.server.orm.entities.KeyValueEntity;
-import org.apache.ambari.server.orm.entities.PermissionEntity;
-import org.apache.ambari.server.orm.entities.PrincipalEntity;
-import org.apache.ambari.server.orm.entities.PrincipalTypeEntity;
-import org.apache.ambari.server.orm.entities.PrivilegeEntity;
-import org.apache.ambari.server.orm.entities.ResourceEntity;
-import org.apache.ambari.server.orm.entities.ResourceTypeEntity;
-import org.apache.ambari.server.orm.entities.UserEntity;
-import org.apache.ambari.server.orm.entities.ViewEntity;
-import org.apache.ambari.server.orm.entities.ViewInstanceEntity;
-import org.apache.ambari.server.state.Cluster;
-import org.apache.ambari.server.state.Clusters;
-import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigHelper;
+import org.apache.ambari.server.orm.dao.*;
+import org.apache.ambari.server.orm.entities.*;
+import org.apache.ambari.server.state.*;
 import org.apache.ambari.server.utils.StageUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.inject.Inject;
-import com.google.inject.Injector;
+import javax.persistence.EntityManager;
+import javax.persistence.TypedQuery;
+import javax.persistence.criteria.*;
+import java.lang.reflect.Type;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.*;
+import java.util.Map.Entry;
 
 /**
  * Upgrade catalog for version 1.7.0.
@@ -532,6 +488,9 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
   @Override
   protected void executeDMLUpdates() throws AmbariException, SQLException {
     // Update historic records with the log paths, but only enough so as to not prolong the upgrade process
+    moveHcatalogIntoHiveService();
+    moveWebHcatIntoHiveService();
+
     executeInTransaction(new Runnable() {
       @Override
       public void run() {
@@ -597,6 +556,117 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
     moveConfigGroupsGlobalToEnv();
   }
 
+  public void moveHcatalogIntoHiveService() throws AmbariException {
+    final String serviceName = "HIVE";
+    final String serviceNameToBeDeleted = "HCATALOG";
+    final String componentName = "HCAT";
+    moveComponentsIntoService(serviceName, serviceNameToBeDeleted, componentName);
+  }
+
+  private void moveWebHcatIntoHiveService() throws AmbariException {
+    final String serviceName = "HIVE";
+    final String serviceNameToBeDeleted = "WEBHCAT";
+    final String componentName = "WEBHCAT_SERVER";
+    moveComponentsIntoService(serviceName, serviceNameToBeDeleted, componentName);
+  }
+
+  private void moveComponentsIntoService(String serviceName, String serviceNameToBeDeleted, String componentName) throws AmbariException {
+    /**
+     * 1. ADD servicecomponentdesiredstate: Add HCAT HIVE entry:
+     * 2. Update hostcomponentdesiredstate: service_name to HIVE where service_name is HCATALOG:
+     * 3. Update hostcomponentstate: service_name to HIVE where service_name is HCATALOG:
+     * 4. DELETE servicecomponentdesiredstate: where component_name is HCAT and service_name is HCATALOG :
+     * 5. Delete servicedesiredstate where  service_name is HCATALOG:
+     * 6. Delete clusterservices where service_name is  HCATALOG:
+     */
+    ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
+    ClusterServiceDAO clusterServiceDAO = injector.getInstance(ClusterServiceDAO.class);
+    ServiceDesiredStateDAO serviceDesiredStateDAO = injector.getInstance(ServiceDesiredStateDAO.class);
+    ServiceComponentDesiredStateDAO serviceComponentDesiredStateDAO = injector.getInstance(ServiceComponentDesiredStateDAO.class);
+    HostComponentDesiredStateDAO hostComponentDesiredStateDAO = injector.getInstance(HostComponentDesiredStateDAO.class);
+    HostComponentStateDAO hostComponentStateDAO = injector.getInstance(HostComponentStateDAO.class);
+
+    List<ClusterEntity> clusterEntities = clusterDAO.findAll();
+    for (final ClusterEntity clusterEntity : clusterEntities) {
+      ServiceComponentDesiredStateEntityPK pkHCATInHcatalog = new ServiceComponentDesiredStateEntityPK();
+      pkHCATInHcatalog.setComponentName(componentName);
+      pkHCATInHcatalog.setClusterId(clusterEntity.getClusterId());
+      pkHCATInHcatalog.setServiceName(serviceNameToBeDeleted);
+      ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntityToDelete = serviceComponentDesiredStateDAO.findByPK(pkHCATInHcatalog);
+
+      if (serviceComponentDesiredStateEntityToDelete == null)
+        continue;
+
+      ServiceDesiredStateEntityPK serviceDesiredStateEntityPK = new ServiceDesiredStateEntityPK();
+      serviceDesiredStateEntityPK.setClusterId(clusterEntity.getClusterId());
+      serviceDesiredStateEntityPK.setServiceName(serviceNameToBeDeleted);
+      ServiceDesiredStateEntity serviceDesiredStateEntity = serviceDesiredStateDAO.findByPK(serviceDesiredStateEntityPK);
+
+      ClusterServiceEntityPK clusterServiceEntityToBeDeletedPK = new ClusterServiceEntityPK();
+      clusterServiceEntityToBeDeletedPK.setClusterId(clusterEntity.getClusterId());
+      clusterServiceEntityToBeDeletedPK.setServiceName(serviceNameToBeDeleted);
+      ClusterServiceEntity clusterServiceEntityToBeDeleted = clusterServiceDAO.findByPK(clusterServiceEntityToBeDeletedPK);
+
+      ClusterServiceEntityPK clusterServiceEntityPK = new ClusterServiceEntityPK();
+      clusterServiceEntityPK.setClusterId(clusterEntity.getClusterId());
+      clusterServiceEntityPK.setServiceName(serviceName);
+
+
+      ClusterServiceEntity clusterServiceEntity = clusterServiceDAO.findByPK(clusterServiceEntityPK);
+
+      ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntity = new ServiceComponentDesiredStateEntity();
+      serviceComponentDesiredStateEntity.setServiceName(serviceName);
+      serviceComponentDesiredStateEntity.setComponentName(serviceComponentDesiredStateEntityToDelete.getComponentName());
+      serviceComponentDesiredStateEntity.setClusterId(clusterEntity.getClusterId());
+      serviceComponentDesiredStateEntity.setDesiredStackVersion(serviceComponentDesiredStateEntityToDelete.getDesiredStackVersion());
+      serviceComponentDesiredStateEntity.setDesiredState(serviceComponentDesiredStateEntityToDelete.getDesiredState());
+      serviceComponentDesiredStateEntity.setClusterServiceEntity(clusterServiceEntity);
+      //serviceComponentDesiredStateDAO.create(serviceComponentDesiredStateEntity);
+
+      Iterator<HostComponentDesiredStateEntity> hostComponentDesiredStateIterator = serviceComponentDesiredStateEntityToDelete.getHostComponentDesiredStateEntities().iterator();
+      Iterator<HostComponentStateEntity> hostComponentStateIterator = serviceComponentDesiredStateEntityToDelete.getHostComponentStateEntities().iterator();
+
+      while (hostComponentDesiredStateIterator.hasNext()) {
+        HostComponentDesiredStateEntity hcDesiredStateEntityToBeDeleted = hostComponentDesiredStateIterator.next();
+        HostComponentDesiredStateEntity hostComponentDesiredStateEntity = new HostComponentDesiredStateEntity();
+        hostComponentDesiredStateEntity.setClusterId(clusterEntity.getClusterId());
+        hostComponentDesiredStateEntity.setComponentName(hcDesiredStateEntityToBeDeleted.getComponentName());
+        hostComponentDesiredStateEntity.setDesiredStackVersion(hcDesiredStateEntityToBeDeleted.getDesiredStackVersion());
+        hostComponentDesiredStateEntity.setDesiredState(hcDesiredStateEntityToBeDeleted.getDesiredState());
+        hostComponentDesiredStateEntity.setHostName(hcDesiredStateEntityToBeDeleted.getHostName());
+        hostComponentDesiredStateEntity.setHostEntity(hcDesiredStateEntityToBeDeleted.getHostEntity());
+        hostComponentDesiredStateEntity.setAdminState(hcDesiredStateEntityToBeDeleted.getAdminState());
+        hostComponentDesiredStateEntity.setMaintenanceState(hcDesiredStateEntityToBeDeleted.getMaintenanceState());
+        hostComponentDesiredStateEntity.setRestartRequired(hcDesiredStateEntityToBeDeleted.isRestartRequired());
+        hostComponentDesiredStateEntity.setServiceName(serviceName);
+        hostComponentDesiredStateEntity.setServiceComponentDesiredStateEntity(serviceComponentDesiredStateEntity);
+        hostComponentDesiredStateDAO.merge(hostComponentDesiredStateEntity);
+        hostComponentDesiredStateDAO.remove(hcDesiredStateEntityToBeDeleted);
+      }
+
+      while (hostComponentStateIterator.hasNext()) {
+        HostComponentStateEntity hcStateToBeDeleted = hostComponentStateIterator.next();
+        HostComponentStateEntity hostComponentStateEntity = new HostComponentStateEntity();
+        hostComponentStateEntity.setClusterId(clusterEntity.getClusterId());
+        hostComponentStateEntity.setComponentName(hcStateToBeDeleted.getComponentName());
+        hostComponentStateEntity.setCurrentStackVersion(hcStateToBeDeleted.getCurrentStackVersion());
+        hostComponentStateEntity.setCurrentState(hcStateToBeDeleted.getCurrentState());
+        hostComponentStateEntity.setHostName(hcStateToBeDeleted.getHostName());
+        hostComponentStateEntity.setHostEntity(hcStateToBeDeleted.getHostEntity());
+        hostComponentStateEntity.setServiceName(serviceName);
+        hostComponentStateEntity.setServiceComponentDesiredStateEntity(serviceComponentDesiredStateEntity);
+        hostComponentStateDAO.merge(hostComponentStateEntity);
+        hostComponentStateDAO.remove(hcStateToBeDeleted);
+      }
+      serviceComponentDesiredStateEntity.setClusterServiceEntity(clusterServiceEntity);
+      serviceComponentDesiredStateDAO.merge(serviceComponentDesiredStateEntity);
+      serviceComponentDesiredStateDAO.remove(serviceComponentDesiredStateEntityToDelete);
+      serviceDesiredStateDAO.remove(serviceDesiredStateEntity);
+      clusterServiceDAO.remove(clusterServiceEntityToBeDeleted);
+    }
+  }
+
+
   private void moveConfigGroupsGlobalToEnv() throws AmbariException {
     final ConfigGroupConfigMappingDAO confGroupConfMappingDAO = injector.getInstance(ConfigGroupConfigMappingDAO.class);
     ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
@@ -905,7 +975,7 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
     if (clusterMap != null && !clusterMap.isEmpty()) {
       for (final Cluster cluster : clusterMap.values()) {
         Set<String> configTypes = configHelper.findConfigTypesByPropertyName(cluster.getCurrentStackVersion(),
-                CONTENT_FIELD_NAME, cluster.getClusterName());
+            CONTENT_FIELD_NAME, cluster.getClusterName());
 
         for(String configType:configTypes) {
           if(!configType.endsWith(ENV_CONFIGS_POSTFIX)) {
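
The six numbered steps in moveComponentsIntoService reduce to a handful of row moves. A self-contained sketch of the same steps in plain SQL, using sqlite3 and toy stand-ins for the tables the comment names (real Ambari schemas have more columns and constraints):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    cur = conn.cursor()
    # Toy stand-ins for the tables named in the migration comment.
    cur.executescript("""
      CREATE TABLE clusterservices (cluster_id INT, service_name TEXT);
      CREATE TABLE servicedesiredstate (cluster_id INT, service_name TEXT);
      CREATE TABLE servicecomponentdesiredstate
        (cluster_id INT, service_name TEXT, component_name TEXT);
      CREATE TABLE hostcomponentdesiredstate
        (cluster_id INT, service_name TEXT, component_name TEXT, host_name TEXT);
      CREATE TABLE hostcomponentstate
        (cluster_id INT, service_name TEXT, component_name TEXT, host_name TEXT);
      INSERT INTO clusterservices VALUES (1, 'HIVE'), (1, 'HCATALOG');
      INSERT INTO servicedesiredstate VALUES (1, 'HCATALOG');
      INSERT INTO servicecomponentdesiredstate VALUES (1, 'HCATALOG', 'HCAT');
      INSERT INTO hostcomponentdesiredstate VALUES (1, 'HCATALOG', 'HCAT', 'h1');
      INSERT INTO hostcomponentstate VALUES (1, 'HCATALOG', 'HCAT', 'h1');
    """)

    def move_component(new_svc, old_svc, component):
        # 1. add the component under the surviving service
        cur.execute("INSERT INTO servicecomponentdesiredstate VALUES (1, ?, ?)",
                    (new_svc, component))
        # 2./3. repoint host-level desired state and live state
        for t in ("hostcomponentdesiredstate", "hostcomponentstate"):
            cur.execute("UPDATE %s SET service_name = ? WHERE service_name = ?" % t,
                        (new_svc, old_svc))
        # 4./5./6. drop the old component row, desired state, and service row
        cur.execute("DELETE FROM servicecomponentdesiredstate "
                    "WHERE service_name = ? AND component_name = ?",
                    (old_svc, component))
        cur.execute("DELETE FROM servicedesiredstate WHERE service_name = ?",
                    (old_svc,))
        cur.execute("DELETE FROM clusterservices WHERE service_name = ?",
                    (old_svc,))

    move_component("HIVE", "HCATALOG", "HCAT")
    move_component("HIVE", "WEBHCAT", "WEBHCAT_SERVER")  # no-op on this toy data

The JPA version above additionally skips clusters where the old component is absent (the findByPK null check) and re-parents the host-level child entities onto the new ServiceComponentDesiredStateEntity before deleting the old rows.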

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/custom_actions/validate_configs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/validate_configs.py b/ambari-server/src/main/resources/custom_actions/validate_configs.py
index 295e74a..c245dbb 100644
--- a/ambari-server/src/main/resources/custom_actions/validate_configs.py
+++ b/ambari-server/src/main/resources/custom_actions/validate_configs.py
@@ -131,7 +131,6 @@ PROPERTIES_TO_CHECK = {
   "HIVE_CLIENT": {
     "hive-env": ["hive_log_dir", "hive_pid_dir"]
   },
-  #HCATALOG
   "HCAT": {
     "hive-env": ["hcat_log_dir", "hcat_pid_dir"]
   },
@@ -297,7 +296,6 @@ USERS_TO_GROUP_MAPPING = {
       "hive_user": "hive_user"
     }
   },
-  #HCATALOG
   "HCAT": {
     "hive-env": {
       "hive_user": "hive_user"

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/1.3.2/role_command_order.json
index a05324f..67eeff3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/role_command_order.json
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/role_command_order.json
@@ -20,10 +20,8 @@
         "WEBHCAT_SERVER-START"],
     "MAPREDUCE_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
     "OOZIE_SERVICE_CHECK-SERVICE_CHECK": ["OOZIE_SERVER-START"],
-    "WEBHCAT_SERVICE_CHECK-SERVICE_CHECK": ["WEBHCAT_SERVER-START"],
     "HBASE_SERVICE_CHECK-SERVICE_CHECK": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START"],
-    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START"],
-    "HCAT_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START"],
+    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START", "WEBHCAT_SERVER-START"],
     "PIG_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
     "SQOOP_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
     "ZOOKEEPER_SERVICE_CHECK-SERVICE_CHECK": ["ZOOKEEPER_SERVER-START"],

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/webhcat-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/webhcat-env.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/webhcat-env.xml
new file mode 100644
index 0000000..f0679e5
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/webhcat-env.xml
@@ -0,0 +1,54 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration>
+  <!-- webhcat-env.sh -->
+  <property>
+    <name>content</name>
+    <description>webhcat-env.sh content</description>
+    <value>
+# The file containing the running pid
+PID_FILE={{webhcat_pid_file}}
+
+TEMPLETON_LOG_DIR={{templeton_log_dir}}/
+
+
+WEBHCAT_LOG_DIR={{templeton_log_dir}}/
+
+# The console error log
+ERROR_LOG={{templeton_log_dir}}/webhcat-console-error.log
+
+# The console log
+CONSOLE_LOG={{templeton_log_dir}}/webhcat-console.log
+
+#TEMPLETON_JAR=templeton_jar_name
+
+#HADOOP_PREFIX=hadoop_prefix
+
+#HCAT_PREFIX=hive_prefix
+
+# Set HADOOP_HOME to point to a specific hadoop install directory
+export HADOOP_HOME=/usr/lib/hadoop
+    </value>
+  </property>
+  
+</configuration>
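
The {{...}} placeholders in the content block are filled from params at deploy time (webhcat.py below renders it via InlineTemplate). A toy substitution showing the idea, with hypothetical values:

    import re

    template = ("PID_FILE={{webhcat_pid_file}}\n"
                "ERROR_LOG={{templeton_log_dir}}/webhcat-console-error.log")
    values = {  # hypothetical values for illustration
        "webhcat_pid_file": "/var/run/webhcat/webhcat.pid",
        "templeton_log_dir": "/var/log/webhcat",
    }
    rendered = re.sub(r"\{\{(\w+)\}\}", lambda m: values[m.group(1)], template)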

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/webhcat-site.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/webhcat-site.xml
new file mode 100644
index 0000000..b87ca7c
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/webhcat-site.xml
@@ -0,0 +1,156 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- 
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+<!-- The default settings for Templeton. -->
+<!-- Edit templeton-site.xml to change settings for your local -->
+<!-- install. -->
+
+<configuration supports_final="true">
+
+  <property>
+    <name>templeton.port</name>
+      <value>50111</value>
+    <description>The HTTP port for the main server.</description>
+  </property>
+
+  <property>
+    <name>templeton.hadoop.conf.dir</name>
+    <value>/etc/hadoop/conf</value>
+    <description>The path to the Hadoop configuration.</description>
+  </property>
+
+  <property>
+    <name>templeton.jar</name>
+    <value>/usr/lib/hcatalog/share/webhcat/svr/webhcat.jar</value>
+    <description>The path to the Templeton jar file.</description>
+  </property>
+
+  <property>
+    <name>templeton.libjars</name>
+    <value>/usr/lib/zookeeper/zookeeper.jar</value>
+    <description>Jars to add the the classpath.</description>
+  </property>
+
+
+  <property>
+    <name>templeton.hadoop</name>
+    <value>/usr/bin/hadoop</value>
+    <description>The path to the Hadoop executable.</description>
+  </property>
+
+  <property>
+    <name>templeton.pig.archive</name>
+    <value>hdfs:///apps/webhcat/pig.tar.gz</value>
+    <description>The path to the Pig archive.</description>
+  </property>
+
+  <property>
+    <name>templeton.pig.path</name>
+    <value>pig.tar.gz/pig/bin/pig</value>
+    <description>The path to the Pig executable.</description>
+  </property>
+
+  <property>
+    <name>templeton.hcat</name>
+    <value>/usr/bin/hcat</value>
+    <description>The path to the hcatalog executable.</description>
+  </property>
+
+  <property>
+    <name>templeton.hive.archive</name>
+    <value>hdfs:///apps/webhcat/hive.tar.gz</value>
+    <description>The path to the Hive archive.</description>
+  </property>
+
+  <property>
+    <name>templeton.hive.path</name>
+    <value>hive.tar.gz/hive/bin/hive</value>
+    <description>The path to the Hive executable.</description>
+  </property>
+
+  <property>
+    <name>templeton.hive.properties</name>
+    <value>hive.metastore.local=false, hive.metastore.uris=thrift://localhost:9933, hive.metastore.sasl.enabled=false</value>
+    <description>Properties to set when running hive.</description>
+  </property>
+
+
+  <property>
+    <name>templeton.zookeeper.hosts</name>
+    <value>localhost:2181</value>
+    <description>ZooKeeper servers, as comma separated host:port pairs</description>
+  </property>
+
+  <property>
+    <name>templeton.storage.class</name>
+    <value>org.apache.hcatalog.templeton.tool.ZooKeeperStorage</value>
+    <description>The class to use as storage</description>
+  </property>
+
+  <property>
+   <name>templeton.override.enabled</name>
+   <value>false</value>
+   <description>
+     Enable the override path in templeton.override.jars
+   </description>
+ </property>
+
+ <property>
+    <name>templeton.streaming.jar</name>
+    <value>hdfs:///apps/webhcat/hadoop-streaming.jar</value>
+    <description>The hdfs path to the Hadoop streaming jar file.</description>
+  </property> 
+
+  <property>
+    <name>templeton.exec.timeout</name>
+    <value>60000</value>
+    <description>Time out for templeton api</description>
+  </property>
+
+  <!-- webhcat-env.sh -->
+  <property>
+    <name>content</name>
+    <description>webhcat-env.sh content</description>
+    <value>
+# The file containing the running pid
+PID_FILE={{pid_file}}
+
+TEMPLETON_LOG_DIR={{templeton_log_dir}}/
+
+
+WEBHCAT_LOG_DIR={{templeton_log_dir}}/
+
+# The console error log
+ERROR_LOG={{templeton_log_dir}}/webhcat-console-error.log
+
+# The console log
+CONSOLE_LOG={{templeton_log_dir}}/webhcat-console.log
+
+#TEMPLETON_JAR=templeton_jar_name
+
+#HADOOP_PREFIX=hadoop_prefix
+
+#HCAT_PREFIX=hive_prefix
+
+# Set HADOOP_HOME to point to a specific hadoop install directory
+export HADOOP_HOME=/usr/lib/hadoop
+    </value>
+  </property>
+  
+</configuration>
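
One wrinkle worth noting: templeton.hive.properties packs several Hive settings into a single comma-separated value. A two-liner for unpacking it (value taken verbatim from the property above):

    raw = ("hive.metastore.local=false, hive.metastore.uris=thrift://localhost:9933, "
           "hive.metastore.sasl.enabled=false")
    props = dict(kv.strip().split("=", 1) for kv in raw.split(","))
    # {'hive.metastore.local': 'false', 'hive.metastore.uris': ..., ...}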

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml
index 2c58c44..c78de04 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml
@@ -81,6 +81,49 @@
         </component>
 
         <component>
+          <name>WEBHCAT_SERVER</name>
+          <displayName>WebHCat Server</displayName>
+          <category>MASTER</category>
+          <cardinality>1</cardinality>
+          <dependencies>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>MAPREDUCE/MAPREDUCE_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
+              <scope>cluster</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+                <co-locate>HIVE/WEBHCAT_SERVER</co-locate>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>ZOOKEEPER/ZOOKEEPER_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <commandScript>
+            <script>scripts/webhcat_server.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+        </component>
+
+        <component>
           <name>HIVE_CLIENT</name>
           <displayName>Hive Client</displayName>
           <category>CLIENT</category>
@@ -112,68 +155,6 @@
             </configFile>            
           </configFiles>
         </component>
-      </components>
-
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hive</name>
-            </package>
-            <package>
-              <name>mysql-connector-java</name>
-            </package>
-            <package>
-              <name>mysql</name>
-            </package>
-          </packages>
-        </osSpecific>
-        <osSpecific>
-          <osFamily>redhat5,redhat6</osFamily>
-          <packages>
-            <package>
-              <name>mysql-server</name>
-            </package>
-          </packages>
-        </osSpecific>
-        <osSpecific>
-          <osFamily>suse11</osFamily>
-          <packages>
-            <package>
-              <name>mysql-client</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-      
-      <requiredServices>
-        <service>MAPREDUCE</service>
-        <service>ZOOKEEPER</service>
-      </requiredServices>
-
-      <configuration-dependencies>
-        <config-type>hive-site</config-type>
-        <config-type>hive-env</config-type>
-        <config-type>hive-log4j</config-type>
-        <config-type>hive-exec-log4j</config-type>
-      </configuration-dependencies>
-    </service>
-
-    <service>
-      <name>HCATALOG</name>
-      <displayName>HCatalog</displayName>
-      <comment>A table and storage management layer for Hadoop that enables users with different data processing tools
-        to more easily read and write data on the grid.
-      </comment>
-      <version>0.11.0.1.3.3.0</version>
-      <components>
         <component>
           <name>HCAT</name>
           <displayName>HCat</displayName>
@@ -206,16 +187,49 @@
           </configFiles>
         </component>
       </components>
+
       <osSpecifics>
         <osSpecific>
           <osFamily>any</osFamily>
           <packages>
             <package>
+              <name>hive</name>
+            </package>
+            <package>
               <name>hcatalog</name>
             </package>
+            <package>
+              <name>webhcat-tar-hive</name>
+            </package>
+            <package>
+              <name>webhcat-tar-pig</name>
+            </package>
+            <package>
+              <name>mysql-connector-java</name>
+            </package>
+            <package>
+              <name>mysql</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>redhat5,redhat6</osFamily>
+          <packages>
+            <package>
+              <name>mysql-server</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>suse11</osFamily>
+          <packages>
+            <package>
+              <name>mysql-client</name>
+            </package>
           </packages>
         </osSpecific>
       </osSpecifics>
+
       <commandScript>
         <script>scripts/service_check.py</script>
         <scriptType>PYTHON</scriptType>
@@ -223,22 +237,18 @@
       </commandScript>
       
       <requiredServices>
-        <service>HIVE</service>
+        <service>MAPREDUCE</service>
+        <service>ZOOKEEPER</service>
       </requiredServices>
 
       <configuration-dependencies>
         <config-type>hive-site</config-type>
         <config-type>hive-env</config-type>
-      </configuration-dependencies>
-
-      <excluded-config-types>
-        <config-type>hive-env</config-type>
-        <config-type>hive-site</config-type>
-        <config-type>hive-exec-log4j</config-type>
         <config-type>hive-log4j</config-type>
-      </excluded-config-types>
-
+        <config-type>hive-exec-log4j</config-type>
+        <config-type>webhcat-site</config-type>
+        <config-type>webhcat-env</config-type>
+      </configuration-dependencies>
     </service>
-
   </services>
 </metainfo>
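
After this merge, a single <service> element carries both the Hive components and the former HCatalog/WebHCat ones. A small ElementTree pass for listing components per service in a stack metainfo file (the file path is an assumption; expected output follows the component names visible in the hunks above):

    import xml.etree.ElementTree as ET

    tree = ET.parse("metainfo.xml")  # e.g. the HDP/1.3.2/services/HIVE file above
    for svc in tree.getroot().iter("service"):
        comps = [c.findtext("name") for c in svc.iter("component")]
        print(svc.findtext("name"), comps)
    # after this patch, HIVE lists WEBHCAT_SERVER, HIVE_CLIENT and HCAT among
    # its components, and no separate HCATALOG service remains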

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/templetonSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/templetonSmoke.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/templetonSmoke.sh
new file mode 100644
index 0000000..21204e6
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/templetonSmoke.sh
@@ -0,0 +1,96 @@
+#!/bin/sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+export ttonhost=$1
+export smoke_test_user=$2
+export smoke_user_keytab=$3
+export security_enabled=$4
+export kinit_path_local=$5
+export ttonurl="http://${ttonhost}:50111/templeton/v1"
+
+if [[ $security_enabled == "true" ]]; then
+  kinitcmd="${kinit_path_local}  -kt ${smoke_user_keytab} ${smoke_test_user}; "
+else
+  kinitcmd=""
+fi
+
+export no_proxy=$ttonhost
+cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>'    $ttonurl/status 2>&1"
+retVal=`su - ${smoke_test_user} -c "$cmd"`
+httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
+
+if [[ "$httpExitCode" -ne "200" ]] ; then
+  echo "Templeton Smoke Test (status cmd): Failed. : $retVal"
+  export TEMPLETON_EXIT_CODE=1
+  exit 1
+fi
+
+exit 0
+
+#try hcat ddl command
+echo "user.name=${smoke_test_user}&exec=show databases;" /tmp/show_db.post.txt
+cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>' -d  \@${destdir}/show_db.post.txt  $ttonurl/ddl 2>&1"
+retVal=`su - ${smoke_test_user} -c "$cmd"`
+httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
+
+if [[ "$httpExitCode" -ne "200" ]] ; then
+  echo "Templeton Smoke Test (ddl cmd): Failed. : $retVal"
+  export TEMPLETON_EXIT_CODE=1
+  exit  1
+fi
+
+# NOT SURE?? SUHAS
+if [[ $security_enabled == "true" ]]; then
+  echo "Templeton Pig Smoke Tests not run in secure mode"
+  exit 0
+fi
+
+#try pig query
+outname=${smoke_test_user}.`date +"%M%d%y"`.$$;
+ttonTestOutput="/tmp/idtest.${outname}.out";
+ttonTestInput="/tmp/idtest.${outname}.in";
+ttonTestScript="idtest.${outname}.pig"
+
+echo "A = load '$ttonTestInput' using PigStorage(':');"  > /tmp/$ttonTestScript
+echo "B = foreach A generate \$0 as id; " >> /tmp/$ttonTestScript
+echo "store B into '$ttonTestOutput';" >> /tmp/$ttonTestScript
+
+#copy pig script to hdfs
+su - ${smoke_test_user} -c "hadoop dfs -copyFromLocal /tmp/$ttonTestScript /tmp/$ttonTestScript"
+
+#copy input file to hdfs
+su - ${smoke_test_user} -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput"
+
+#create, copy post args file
+echo -n "user.name=${smoke_test_user}&file=/tmp/$ttonTestScript" > /tmp/pig_post.txt
+
+#submit pig query
+cmd="curl -s -w 'http_code <%{http_code}>' -d  \@${destdir}/pig_post.txt  $ttonurl/pig 2>&1"
+retVal=`su - ${smoke_test_user} -c "$cmd"`
+httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
+if [[ "$httpExitCode" -ne "200" ]] ; then
+  echo "Templeton Smoke Test (pig cmd): Failed. : $retVal"
+  export TEMPLETON_EXIT_CODE=1
+  exit 1
+fi
+
+exit 0
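
As committed, the script returns right after the status probe (the first exit 0 above), so the ddl and pig sections below it are effectively dead code. The reachable part boils down to one HTTP check; a rough Python equivalent for the non-secure case (host is an assumption, standing in for $1):

    import urllib.request

    host = "localhost"  # $1 in the script
    url = "http://%s:50111/templeton/v1/status" % host
    with urllib.request.urlopen(url, timeout=60) as resp:
        assert resp.getcode() == 200, "Templeton smoke test (status cmd) failed"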

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
index e38f059..2285cac 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
@@ -64,7 +64,6 @@ smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
 
 #hive_env
@@ -155,11 +154,37 @@ hive_hdfs_user_dir = format("/user/{hive_user}")
 hive_hdfs_user_mode = 0700
 #for create_hdfs_directory
 hostname = config["hostname"]
-hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+
+#################################################
+################## WebHCat ######################
+#################################################
+webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
+
+config_dir = '/etc/hcatalog/conf'
+
+templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
+templeton_pid_dir = status_params.templeton_pid_dir
+
+webhcat_pid_file = status_params.webhcat_pid_file
+
+templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
+
+
+webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
+
+webhcat_apps_dir = "/apps/webhcat"
+
+#hdfs directories
+hcat_hdfs_user_dir = format("/user/{hcat_user}")
+hcat_hdfs_user_mode = 0755
+webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
+webhcat_hdfs_user_mode = 0755
+#for create_hdfs_directory
+security_param = "true" if security_enabled else "false"
+
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code
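
The import functools line and the comment beneath it introduce the usual params idiom: bind the shared keyword arguments once with functools.partial so call sites only pass the path and per-call options. A generic sketch of that pattern (hdfs_directory and the keytab path are stand-ins, not the real resource):

    import functools

    def hdfs_directory(path, action=None, owner=None, keytab=None):
        print(path, action, owner, keytab)  # stand-in for the real resource

    # bind common security arguments once, as params.py does
    HdfsDirectory = functools.partial(
        hdfs_directory, keytab="/etc/security/keytabs/hdfs.headless.keytab")
    HdfsDirectory("/apps/webhcat", action="create_delayed", owner="hcat")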

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/service_check.py
index 09ba1bf..d7b10eb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/service_check.py
@@ -23,6 +23,7 @@ import socket
 import sys
 
 from hcat_service_check import hcat_service_check
+from webhcat_service_check import webhcat_service_check
 
 class HiveServiceCheck(Script):
   def service_check(self, env):
@@ -42,6 +43,7 @@ class HiveServiceCheck(Script):
       sys.exit(1)
 
     hcat_service_check()
+    webhcat_service_check()
 
 if __name__ == "__main__":
   HiveServiceCheck().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/status_params.py
index f371bee..7c1af00 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/status_params.py
@@ -29,6 +29,9 @@ hive_metastore_pid = 'hive.pid'
 
 hcat_pid_dir = config['configurations']['hive-env']['hcat_pid_dir'] #hcat_pid_dir
 
+templeton_pid_dir = config['configurations']['hive-env']['hcat_pid_dir']
+webhcat_pid_file = format('{templeton_pid_dir}/webhcat.pid')
+
 if System.get_instance().os_family == "suse":
   daemon_name = 'mysql'
 else:
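
The status command for the new WEBHCAT_SERVER component keys off webhcat_pid_file. The conventional pid-file liveness probe such a status check performs, sketched generically (the example path is an assumption, per the hcat_pid_dir config):

    import os

    def is_running(pid_file):
        try:
            with open(pid_file) as f:
                pid = int(f.read().strip())
            os.kill(pid, 0)  # signal 0 probes process existence without killing
            return True
        except (OSError, IOError, ValueError):
            return False

    # is_running('/var/run/webhcat/webhcat.pid')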

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat.py
new file mode 100644
index 0000000..037cdb5
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat.py
@@ -0,0 +1,107 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+from resource_management import *
+
+
+def webhcat():
+  import params
+
+  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
+    params.HdfsDirectory(params.hcat_hdfs_user_dir,
+                         action="create_delayed",
+                         owner=params.hcat_user,
+                         mode=params.hcat_hdfs_user_mode
+    )
+  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
+                       action="create_delayed",
+                       owner=params.webhcat_user,
+                       mode=params.webhcat_hdfs_user_mode
+  )
+  params.HdfsDirectory(params.webhcat_apps_dir,
+                       action="create_delayed",
+                       owner=params.webhcat_user,
+                       mode=0755
+  )
+  params.HdfsDirectory(None, action="create")
+  Directory(params.templeton_pid_dir,
+            owner=params.webhcat_user,
+            mode=0755,
+            group=params.user_group,
+            recursive=True)
+
+  Directory(params.templeton_log_dir,
+            owner=params.webhcat_user,
+            mode=0755,
+            group=params.user_group,
+            recursive=True)
+
+  Directory(params.config_dir,
+            owner=params.webhcat_user,
+            group=params.user_group)
+
+  XmlConfig("webhcat-site.xml",
+            conf_dir=params.config_dir,
+            configurations=params.config['configurations']['webhcat-site'],
+            configuration_attributes=params.config['configuration_attributes']['webhcat-site'],
+            owner=params.webhcat_user,
+            group=params.user_group,
+  )
+
+  File(format("{config_dir}/webhcat-env.sh"),
+       owner=params.webhcat_user,
+       group=params.user_group,
+       content=InlineTemplate(params.webhcat_env_sh_template)
+  )
+
+  if params.security_enabled:
+    kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_user};")
+  else:
+    kinit_if_needed = ""
+
+  if kinit_if_needed:
+    Execute(kinit_if_needed,
+            user=params.webhcat_user,
+            path='/bin'
+    )
+
+  CopyFromLocal('/usr/lib/hadoop/contrib/streaming/hadoop-streaming*.jar',
+                owner=params.webhcat_user,
+                mode=0755,
+                dest_dir=params.webhcat_apps_dir,
+                kinnit_if_needed=kinit_if_needed,
+                hdfs_user=params.hdfs_user
+  )
+
+  CopyFromLocal('/usr/share/HDP-webhcat/pig.tar.gz',
+                owner=params.webhcat_user,
+                mode=0755,
+                dest_dir=params.webhcat_apps_dir,
+                kinnit_if_needed=kinit_if_needed,
+                hdfs_user=params.hdfs_user
+  )
+
+  CopyFromLocal('/usr/share/HDP-webhcat/hive.tar.gz',
+                owner=params.webhcat_user,
+                mode=0755,
+                dest_dir=params.webhcat_apps_dir,
+                kinnit_if_needed=kinit_if_needed,
+                hdfs_user=params.hdfs_user
+  )
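
webhcat() queues its HDFS directories with action="create_delayed" and flushes them with the final HdfsDirectory(None, action="create"), so the agent batches the directory creation (and any kinit) into one pass instead of one per directory. A condensed sketch of the idiom, assuming the params.HdfsDirectory partial sketched earlier (paths and owners are placeholders):

    import params

    # Queue first, then flush everything in a single "create" pass.
    params.HdfsDirectory('/user/hcat', action="create_delayed", owner='hcat', mode=0755)
    params.HdfsDirectory('/apps/webhcat', action="create_delayed", owner='webhcat', mode=0755)
    params.HdfsDirectory(None, action="create")  # flushes the queued directories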

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_server.py
new file mode 100644
index 0000000..088cb41
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_server.py
@@ -0,0 +1,53 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+import sys
+from resource_management import *
+
+from webhcat import webhcat
+from webhcat_service import webhcat_service
+
+class WebHCatServer(Script):
+  def install(self, env):
+    self.install_packages(env)
+  def configure(self, env):
+    import params
+    env.set_params(params)
+    webhcat()
+
+  def start(self, env):
+    import params
+    env.set_params(params)
+    self.configure(env) # FOR SECURITY
+    webhcat_service(action = 'start')
+
+  def stop(self, env):
+    import params
+    env.set_params(params)
+
+    webhcat_service(action = 'stop')
+
+  def status(self, env):
+    import status_params
+    env.set_params(status_params)
+    check_process_status(status_params.webhcat_pid_file)
+
+if __name__ == "__main__":
+  WebHCatServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service.py
new file mode 100644
index 0000000..99c49a6
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service.py
@@ -0,0 +1,40 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+from resource_management import *
+
+def webhcat_service(action='start'):
+  import params
+
+  cmd = format('env HADOOP_HOME={hadoop_home} /usr/lib/hcatalog/sbin/webhcat_server.sh')
+
+  if action == 'start':
+    daemon_cmd = format('{cmd} start')
+    no_op_test = format('ls {webhcat_pid_file} >/dev/null 2>&1 && ps `cat {webhcat_pid_file}` >/dev/null 2>&1')
+    Execute(daemon_cmd,
+            user=params.webhcat_user,
+            not_if=no_op_test
+    )
+  elif action == 'stop':
+    daemon_cmd = format('{cmd} stop')
+    Execute(daemon_cmd,
+            user=params.webhcat_user
+    )
+    Execute(format('rm -f {webhcat_pid_file}'))
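
The not_if guard makes start idempotent: Execute skips the daemon command whenever a live process already owns the pid file, and stop removes the pid file afterwards rather than trusting webhcat_server.sh to clean it up. Roughly how the start renders, assuming templeton_pid_dir resolves to /var/run/webhcat (an illustrative value, not necessarily the stack default):

    from resource_management import *

    # Runs webhcat_server.sh start only when no live process owns the pid file.
    Execute('env HADOOP_HOME=/usr/lib/hadoop /usr/lib/hcatalog/sbin/webhcat_server.sh start',
            user='hcat',  # webhcat_user; placeholder value
            not_if='ls /var/run/webhcat/webhcat.pid >/dev/null 2>&1 '
                   '&& ps `cat /var/run/webhcat/webhcat.pid` >/dev/null 2>&1')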

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service_check.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service_check.py
new file mode 100644
index 0000000..1352e0b
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service_check.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+def webhcat_service_check():
+  import params
+  File(format("{tmp_dir}/templetonSmoke.sh"),
+       content= StaticFile('templetonSmoke.sh'),
+       mode=0755
+  )
+
+  cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {smokeuser_keytab}"
+               " {security_param} {kinit_path_local}",
+               smokeuser_keytab=params.smoke_user_keytab if params.security_enabled else "no_keytab")
+
+  Execute(cmd,
+          tries=3,
+          try_sleep=5,
+          path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
+          logoutput=True)
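
format() resolves names such as webhcat_server_host and security_param from the surrounding params scope, while the explicit smokeuser_keytab keyword overrides that lookup; that is how non-secure clusters pass the literal "no_keytab" as the script's third argument. On such a cluster the staged script ends up invoked roughly like this (the host, tmp_dir and false flag are illustrative assumptions):

    from resource_management import *

    # Rendered smoke invocation: host, smoke user, keytab, security flag, kinit path.
    Execute('/var/lib/ambari-agent/tmp/templetonSmoke.sh '
            'c6401.ambari.apache.org ambari-qa no_keytab false /usr/bin/kinit',
            tries=3, try_sleep=5, logoutput=True,
            path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin')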
+
+
+
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/metainfo.xml
index c926429..137ab71 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/metainfo.xml
@@ -52,7 +52,7 @@
               </auto-deploy>
             </dependency>
             <dependency>
-              <name>HCATALOG/HCAT</name>
+              <name>HIVE/HCAT</name>
               <scope>host</scope>
               <auto-deploy>
                 <enabled>true</enabled>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2
index c8f2d87..3833b15 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2
@@ -69,12 +69,6 @@ define servicegroup {
   alias  OOZIE Checks
 }
 {% endif %}
-{% if hostgroup_defs['webhcat-server'] %}
-define servicegroup {
-  servicegroup_name  WEBHCAT
-  alias  WEBHCAT Checks
-}
-{% endif %}
 {% if hostgroup_defs['nagios-server'] %}
 define servicegroup {
   servicegroup_name  NAGIOS

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-services.cfg.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-services.cfg.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-services.cfg.j2
index 1a68bfd..969c0f6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-services.cfg.j2
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-services.cfg.j2
@@ -566,7 +566,7 @@ define service {
         hostgroup_name          webhcat-server
         use                     hadoop-service
         service_description     WEBHCAT::WebHCat Server status
-        servicegroups           WEBHCAT 
+        servicegroups           HIVE
         {% if security_enabled %}
         check_command           check_templeton_status!{{ templeton_port }}!v1!{{ str(security_enabled).lower() }}!{{ nagios_keytab_path }}!{{ nagios_principal_name }}!{{ kinit_path_local }}
         {% else %}

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/configuration/webhcat-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/configuration/webhcat-env.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/configuration/webhcat-env.xml
deleted file mode 100644
index 304bbb7..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/configuration/webhcat-env.xml
+++ /dev/null
@@ -1,54 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration>
-  <!-- webhcat-env.sh -->
-  <property>
-    <name>content</name>
-    <description>webhcat-env.sh content</description>
-    <value>
-# The file containing the running pid
-PID_FILE={{pid_file}}
-
-TEMPLETON_LOG_DIR={{templeton_log_dir}}/
-
-
-WEBHCAT_LOG_DIR={{templeton_log_dir}}/
-
-# The console error log
-ERROR_LOG={{templeton_log_dir}}/webhcat-console-error.log
-
-# The console log
-CONSOLE_LOG={{templeton_log_dir}}/webhcat-console.log
-
-#TEMPLETON_JAR=templeton_jar_name
-
-#HADOOP_PREFIX=hadoop_prefix
-
-#HCAT_PREFIX=hive_prefix
-
-# Set HADOOP_HOME to point to a specific hadoop install directory
-export HADOOP_HOME=/usr/lib/hadoop
-    </value>
-  </property>
-  
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/configuration/webhcat-site.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/configuration/webhcat-site.xml
deleted file mode 100644
index b87ca7c..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/configuration/webhcat-site.xml
+++ /dev/null
@@ -1,156 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- 
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-<!-- The default settings for Templeton. -->
-<!-- Edit templeton-site.xml to change settings for your local -->
-<!-- install. -->
-
-<configuration supports_final="true">
-
-  <property>
-    <name>templeton.port</name>
-      <value>50111</value>
-    <description>The HTTP port for the main server.</description>
-  </property>
-
-  <property>
-    <name>templeton.hadoop.conf.dir</name>
-    <value>/etc/hadoop/conf</value>
-    <description>The path to the Hadoop configuration.</description>
-  </property>
-
-  <property>
-    <name>templeton.jar</name>
-    <value>/usr/lib/hcatalog/share/webhcat/svr/webhcat.jar</value>
-    <description>The path to the Templeton jar file.</description>
-  </property>
-
-  <property>
-    <name>templeton.libjars</name>
-    <value>/usr/lib/zookeeper/zookeeper.jar</value>
-    <description>Jars to add the the classpath.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.hadoop</name>
-    <value>/usr/bin/hadoop</value>
-    <description>The path to the Hadoop executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.archive</name>
-    <value>hdfs:///apps/webhcat/pig.tar.gz</value>
-    <description>The path to the Pig archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.path</name>
-    <value>pig.tar.gz/pig/bin/pig</value>
-    <description>The path to the Pig executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hcat</name>
-    <value>/usr/bin/hcat</value>
-    <description>The path to the hcatalog executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.archive</name>
-    <value>hdfs:///apps/webhcat/hive.tar.gz</value>
-    <description>The path to the Hive archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.path</name>
-    <value>hive.tar.gz/hive/bin/hive</value>
-    <description>The path to the Hive executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.properties</name>
-    <value>hive.metastore.local=false, hive.metastore.uris=thrift://localhost:9933, hive.metastore.sasl.enabled=false</value>
-    <description>Properties to set when running hive.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.zookeeper.hosts</name>
-    <value>localhost:2181</value>
-    <description>ZooKeeper servers, as comma separated host:port pairs</description>
-  </property>
-
-  <property>
-    <name>templeton.storage.class</name>
-    <value>org.apache.hcatalog.templeton.tool.ZooKeeperStorage</value>
-    <description>The class to use as storage</description>
-  </property>
-
-  <property>
-   <name>templeton.override.enabled</name>
-   <value>false</value>
-   <description>
-     Enable the override path in templeton.override.jars
-   </description>
- </property>
-
- <property>
-    <name>templeton.streaming.jar</name>
-    <value>hdfs:///apps/webhcat/hadoop-streaming.jar</value>
-    <description>The hdfs path to the Hadoop streaming jar file.</description>
-  </property> 
-
-  <property>
-    <name>templeton.exec.timeout</name>
-    <value>60000</value>
-    <description>Time out for templeton api</description>
-  </property>
-
-  <!-- webhcat-env.sh -->
-  <property>
-    <name>content</name>
-    <description>webhcat-env.sh content</description>
-    <value>
-# The file containing the running pid
-PID_FILE={{pid_file}}
-
-TEMPLETON_LOG_DIR={{templeton_log_dir}}/
-
-
-WEBHCAT_LOG_DIR={{templeton_log_dir}}/
-
-# The console error log
-ERROR_LOG={{templeton_log_dir}}/webhcat-console-error.log
-
-# The console log
-CONSOLE_LOG={{templeton_log_dir}}/webhcat-console.log
-
-#TEMPLETON_JAR=templeton_jar_name
-
-#HADOOP_PREFIX=hadoop_prefix
-
-#HCAT_PREFIX=hive_prefix
-
-# Set HADOOP_HOME to point to a specific hadoop install directory
-export HADOOP_HOME=/usr/lib/hadoop
-    </value>
-  </property>
-  
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/metainfo.xml
deleted file mode 100644
index b115c28..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/metainfo.xml
+++ /dev/null
@@ -1,103 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <displayName>WebHCat</displayName>
-      <comment>Provides a REST-like web API for HCatalog and related Hadoop components.</comment>
-      <version>0.11.0.1.3.3.0</version>
-      <components>
-        <component>
-          <name>WEBHCAT_SERVER</name>
-          <displayName>WebHCat Server</displayName>
-          <category>MASTER</category>
-          <cardinality>1</cardinality>
-          <dependencies>
-            <dependency>
-              <name>HDFS/HDFS_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>MAPREDUCE/MAPREDUCE_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
-              <scope>cluster</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-                <co-locate>WEBHCAT/WEBHCAT_SERVER</co-locate>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>ZOOKEEPER/ZOOKEEPER_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/webhcat_server.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-        </component>
-      </components>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hcatalog</name>
-            </package>
-            <package>
-              <name>webhcat-tar-hive</name>
-            </package>
-            <package>
-              <name>webhcat-tar-pig</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-      
-      <requiredServices>
-        <service>ZOOKEEPER</service>
-        <service>HIVE</service>
-      </requiredServices>
-      
-      <configuration-dependencies>
-        <config-type>webhcat-site</config-type>
-        <config-type>webhcat-env</config-type>
-      </configuration-dependencies>
-    </service>
-  </services>
-</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/files/templetonSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/files/templetonSmoke.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/files/templetonSmoke.sh
deleted file mode 100644
index 21204e6..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/files/templetonSmoke.sh
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/bin/sh
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-export ttonhost=$1
-export smoke_test_user=$2
-export smoke_user_keytab=$3
-export security_enabled=$4
-export kinit_path_local=$5
-export ttonurl="http://${ttonhost}:50111/templeton/v1"
-
-if [[ $security_enabled == "true" ]]; then
-  kinitcmd="${kinit_path_local}  -kt ${smoke_user_keytab} ${smoke_test_user}; "
-else
-  kinitcmd=""
-fi
-
-export no_proxy=$ttonhost
-cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>'    $ttonurl/status 2>&1"
-retVal=`su - ${smoke_test_user} -c "$cmd"`
-httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
-
-if [[ "$httpExitCode" -ne "200" ]] ; then
-  echo "Templeton Smoke Test (status cmd): Failed. : $retVal"
-  export TEMPLETON_EXIT_CODE=1
-  exit 1
-fi
-
-exit 0
-
-#try hcat ddl command
-echo "user.name=${smoke_test_user}&exec=show databases;" /tmp/show_db.post.txt
-cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>' -d  \@${destdir}/show_db.post.txt  $ttonurl/ddl 2>&1"
-retVal=`su - ${smoke_test_user} -c "$cmd"`
-httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
-
-if [[ "$httpExitCode" -ne "200" ]] ; then
-  echo "Templeton Smoke Test (ddl cmd): Failed. : $retVal"
-  export TEMPLETON_EXIT_CODE=1
-  exit  1
-fi
-
-# NOT SURE?? SUHAS
-if [[ $security_enabled == "true" ]]; then
-  echo "Templeton Pig Smoke Tests not run in secure mode"
-  exit 0
-fi
-
-#try pig query
-outname=${smoke_test_user}.`date +"%M%d%y"`.$$;
-ttonTestOutput="/tmp/idtest.${outname}.out";
-ttonTestInput="/tmp/idtest.${outname}.in";
-ttonTestScript="idtest.${outname}.pig"
-
-echo "A = load '$ttonTestInput' using PigStorage(':');"  > /tmp/$ttonTestScript
-echo "B = foreach A generate \$0 as id; " >> /tmp/$ttonTestScript
-echo "store B into '$ttonTestOutput';" >> /tmp/$ttonTestScript
-
-#copy pig script to hdfs
-su - ${smoke_test_user} -c "hadoop dfs -copyFromLocal /tmp/$ttonTestScript /tmp/$ttonTestScript"
-
-#copy input file to hdfs
-su - ${smoke_test_user} -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput"
-
-#create, copy post args file
-echo -n "user.name=${smoke_test_user}&file=/tmp/$ttonTestScript" > /tmp/pig_post.txt
-
-#submit pig query
-cmd="curl -s -w 'http_code <%{http_code}>' -d  \@${destdir}/pig_post.txt  $ttonurl/pig 2>&1"
-retVal=`su - ${smoke_test_user} -c "$cmd"`
-httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
-if [[ "$httpExitCode" -ne "200" ]] ; then
-  echo "Templeton Smoke Test (pig cmd): Failed. : $retVal"
-  export TEMPLETON_EXIT_CODE=1
-  exit 1
-fi
-
-exit 0

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/__init__.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/__init__.py
deleted file mode 100644
index 35de4bb..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/__init__.py
+++ /dev/null
@@ -1,20 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""