Posted to commits@ambari.apache.org by jo...@apache.org on 2014/09/16 14:50:41 UTC

[01/27] git commit: AMBARI-7290. Usability: show 'Restart Required' bar on Summary tab. (xiwang)

Repository: ambari
Updated Branches:
  refs/heads/branch-alerts-dev 760bedfe3 -> 17b8e7990


AMBARI-7290. Usability: show 'Restart Required' bar on Summary tab. (xiwang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1dcb9dc6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1dcb9dc6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1dcb9dc6

Branch: refs/heads/branch-alerts-dev
Commit: 1dcb9dc62e59f5320940dcc3188398d15b706d33
Parents: 3355733
Author: Xi Wang <xi...@apache.org>
Authored: Fri Sep 12 17:28:47 2014 -0700
Committer: Xi Wang <xi...@apache.org>
Committed: Fri Sep 12 17:28:54 2014 -0700

----------------------------------------------------------------------
 .../controllers/main/service/info/configs.js    |  9 +--
 ambari-web/app/styles/application.less          | 14 +++++
 .../app/templates/main/service/info/configs.hbs |  2 +-
 .../app/templates/main/service/info/summary.hbs | 26 +++++++-
 .../app/views/main/service/info/summary.js      | 62 ++++++++++++++++++++
 5 files changed, 107 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1dcb9dc6/ambari-web/app/controllers/main/service/info/configs.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/info/configs.js b/ambari-web/app/controllers/main/service/info/configs.js
index 2280d01..cea4630 100644
--- a/ambari-web/app/controllers/main/service/info/configs.js
+++ b/ambari-web/app/controllers/main/service/info/configs.js
@@ -2328,9 +2328,10 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
    * triggers the showItemsShouldBeRestarted popup with hosts that require a restart
    * @method showHostsShouldBeRestarted
    */
-  showHostsShouldBeRestarted: function () {
+  showHostsShouldBeRestarted: function (restartRequiredHostsAndComponents) {
     var hosts = [];
-    for (var hostName in this.get('content.restartRequiredHostsAndComponents')) {
+    var rhc = this.get('content.restartRequiredHostsAndComponents') || restartRequiredHostsAndComponents;
+    for (var hostName in rhc) {
       hosts.push(hostName);
     }
     var hostsText = hosts.length == 1 ? Em.I18n.t('common.host') : Em.I18n.t('common.hosts');
@@ -2342,8 +2343,8 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
    * triggers the showItemsShouldBeRestarted popup with components that require a restart
    * @method showComponentsShouldBeRestarted
    */
-  showComponentsShouldBeRestarted: function () {
-    var rhc = this.get('content.restartRequiredHostsAndComponents');
+  showComponentsShouldBeRestarted: function (restartRequiredHostsAndComponents) {
+    var rhc = this.get('content.restartRequiredHostsAndComponents') || restartRequiredHostsAndComponents;
     var hostsComponets = [];
     var componentsObject = {};
     for (var hostName in rhc) {
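
With this change both popup helpers take an optional map argument: each still prefers
the controller's own 'content.restartRequiredHostsAndComponents', but falls back to the
passed-in map when that content is not loaded, which is the case when the call comes
from the new Summary bar. A minimal sketch of the two call paths, with hypothetical
host and component names for illustration:

    // From the Configs page: controller.content is populated, so the helper
    // is called with no argument and reads the map from its own content.
    controller.showHostsShouldBeRestarted();

    // From the Summary page: the view passes its own copy of the map, which
    // the helper uses as a fallback (hypothetical data for illustration).
    var rhc = {
      'c6401.ambari.apache.org': ['DATANODE', 'NODEMANAGER'],
      'c6402.ambari.apache.org': ['DATANODE']
    };
    App.router.get('mainServiceInfoConfigsController').showHostsShouldBeRestarted(rhc);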

http://git-wip-us.apache.org/repos/asf/ambari/blob/1dcb9dc6/ambari-web/app/styles/application.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/application.less b/ambari-web/app/styles/application.less
index e4b455e..7a4138c 100644
--- a/ambari-web/app/styles/application.less
+++ b/ambari-web/app/styles/application.less
@@ -1066,6 +1066,19 @@ h1 {
   padding: 8px 0;
 }
 
+#summary-restart-bar {
+  margin-top: 20px;
+  .alert{
+    .icon-refresh{
+      margin-left:10px;
+      color: #fdb82f;
+    }
+  }
+  .dropdown-menu > li > a:hover {
+    text-shadow: none;
+  }
+}
+
 #serviceConfig {
   margin-top: 20px;
   .alert{
@@ -5597,6 +5610,7 @@ ul.inline li {
     width:757px!important
   }
 
+
   .assign-masters {
     .host-assignments {
       margin-left: 0;

http://git-wip-us.apache.org/repos/asf/ambari/blob/1dcb9dc6/ambari-web/app/templates/main/service/info/configs.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/service/info/configs.hbs b/ambari-web/app/templates/main/service/info/configs.hbs
index 5335bca..b4ec65c 100644
--- a/ambari-web/app/templates/main/service/info/configs.hbs
+++ b/ambari-web/app/templates/main/service/info/configs.hbs
@@ -23,7 +23,7 @@
         {{#if App.isAdmin}}
           <div>
             <div class="alert alert-warning clearfix">
-              <i class="icon-refresh"></i> {{{view.needToRestartMessage}}} {{t services.service.config.restartService.needToRestart}}  <a href="#" {{action showComponentsShouldBeRestarted target="controller"}}>{{view.componentsCount}} {{pluralize view.componentsCount singular="t:common.component" plural="t:common.components"}}</a> {{t on}} <a href="#" {{action showHostsShouldBeRestarted target="controller"}}>{{view.hostsCount}} {{pluralize view.hostsCount singular="t:common.host" plural="t:common.hosts"}}</a>
+              <i class="icon-refresh"></i> {{t services.service.config.restartService.needToRestart}}  <a href="#" {{action showComponentsShouldBeRestarted target="controller"}}>{{view.componentsCount}} {{pluralize view.componentsCount singular="t:common.component" plural="t:common.components"}}</a> {{t on}} <a href="#" {{action showHostsShouldBeRestarted target="controller"}}>{{view.hostsCount}} {{pluralize view.hostsCount singular="t:common.host" plural="t:common.hosts"}}</a>
              <span class="restart-components pull-right">&nbsp;</span>
               <div class="btn-group pull-right">
                 <button type="button" class="btn btn-default dropdown-toggle btn-warning" data-toggle="dropdown">

http://git-wip-us.apache.org/repos/asf/ambari/blob/1dcb9dc6/ambari-web/app/templates/main/service/info/summary.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/service/info/summary.hbs b/ambari-web/app/templates/main/service/info/summary.hbs
index 88225b8..080a3f7 100644
--- a/ambari-web/app/templates/main/service/info/summary.hbs
+++ b/ambari-web/app/templates/main/service/info/summary.hbs
@@ -16,9 +16,33 @@
 * limitations under the License.
 }}
 
+{{#if App.supports.hostOverrides}}
+  {{#if view.service.isRestartRequired}}
+    {{#if App.isAdmin}}
+      <div id="summary-restart-bar">
+        <div class="alert alert-warning clearfix">
+          <i class="icon-refresh"></i> {{{view.needToRestartMessage}}} {{t services.service.config.restartService.needToRestart}}  <a href="#" {{action showComponentsShouldBeRestarted target="view"}}>{{view.componentsCount}} {{pluralize view.componentsCount singular="t:common.component" plural="t:common.components"}}</a> {{t on}} <a href="#" {{action showHostsShouldBeRestarted target="view"}}>{{view.hostsCount}} {{pluralize view.hostsCount singular="t:common.host" plural="t:common.hosts"}}</a>
+          <span class="restart-components pull-right">&nbsp;</span>
+          <div class="btn-group pull-right">
+            <button type="button" class="btn btn-default dropdown-toggle btn-warning" data-toggle="dropdown">
+              {{t hosts.host.details.needToRestart.button}}
+                <span class="caret"></span>
+            </button>
+            <ul class="dropdown-menu">
+                <li><a href="#" {{action restartAllStaleConfigComponents target="view"}}>{{t restart.service.all}}</a></li>
+              {{#if view.rollingRestartSlaveComponentName}}
+                  <li><a href="#" {{action rollingRestartStaleConfigSlaveComponents view.rollingRestartSlaveComponentName target="view"}}>{{view.rollingRestartActionName}}</a></li>
+              {{/if}}
+            </ul>
+          </div>
+        </div>
+      </div>
+    {{/if}}
+  {{/if}}
+{{/if}}
+
 <div class="row-fluid service-block">
   <div class="span6">
-
     <div class="box">
       <div class="box-header">
         <h4>{{controller.content.label}} {{t services.service.info.menu.summary}}</h4>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1dcb9dc6/ambari-web/app/views/main/service/info/summary.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/info/summary.js b/ambari-web/app/views/main/service/info/summary.js
index ae4a814..8d5ed4e 100644
--- a/ambari-web/app/views/main/service/info/summary.js
+++ b/ambari-web/app/views/main/service/info/summary.js
@@ -16,6 +16,7 @@
  */
 
 var App = require('app');
+var batchUtils = require('utils/batch_scheduled_requests');
 require('views/main/service/service');
 
 App.AlertItemView = Em.View.extend({
@@ -252,6 +253,67 @@ App.MainServiceInfoSummaryView = Em.View.extend({
 
   oldServiceName:'',
 
+  /*
+   * 'Restart Required bar' start
+   */
+  componentsCount: null,
+  hostsCount: null,
+
+  restartRequiredHostsAndComponents:function () {
+    return this.get('controller.content.restartRequiredHostsAndComponents');
+  }.property('controller.content.restartRequiredHostsAndComponents'),
+
+  updateComponentInformation: function() {
+    var hc = this.get('restartRequiredHostsAndComponents');
+    var hostsCount = 0;
+    var componentsCount = 0;
+    for (var host in hc) {
+      hostsCount++;
+      componentsCount += hc[host].length;
+    }
+    this.set('componentsCount', componentsCount);
+    this.set('hostsCount', hostsCount);
+  }.observes('restartRequiredHostsAndComponents'),
+
+  rollingRestartSlaveComponentName : function() {
+    return batchUtils.getRollingRestartComponentName(this.get('serviceName'));
+  }.property('serviceName'),
+  rollingRestartActionName : function() {
+    var label = null;
+    var componentName = this.get('rollingRestartSlaveComponentName');
+    if (componentName) {
+      label = Em.I18n.t('rollingrestart.dialog.title').format(App.format.role(componentName));
+    }
+    return label;
+  }.property('rollingRestartSlaveComponentName'),
+  showComponentsShouldBeRestarted: function () {
+    var rhc = this.get('restartRequiredHostsAndComponents');
+    App.router.get('mainServiceInfoConfigsController').showComponentsShouldBeRestarted(rhc);
+  },
+  showHostsShouldBeRestarted: function () {
+    var rhc = this.get('restartRequiredHostsAndComponents');
+    App.router.get('mainServiceInfoConfigsController').showHostsShouldBeRestarted(rhc);
+  },
+  restartAllStaleConfigComponents: function () {
+    var self = this;
+    var serviceDisplayName = this.get('service.displayName');
+    var bodyMessage = Em.Object.create({
+      confirmMsg: Em.I18n.t('services.service.restartAll.confirmMsg').format(serviceDisplayName),
+      confirmButton: Em.I18n.t('services.service.restartAll.confirmButton'),
+      additionalWarningMsg: this.get('service.passiveState') === 'OFF' ? Em.I18n.t('services.service.restartAll.warningMsg.turnOnMM').format(serviceDisplayName) : null
+    });
+    return App.showConfirmationFeedBackPopup(function (query) {
+      var selectedService = self.get('service.id');
+      batchUtils.restartAllServiceHostComponents(selectedService, true, query);
+    }, bodyMessage);
+  },
+  rollingRestartStaleConfigSlaveComponents: function (componentName) {
+    batchUtils.launchHostComponentRollingRestart(componentName.context, this.get('service.displayName'), this.get('service.passiveState') === "ON", true);
+  },
+  /*
+   * 'Restart Required bar' end
+   */
+
   /**
    * Contains graphs for this particular service
    */
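
The updateComponentInformation observer above implies that
'restartRequiredHostsAndComponents' is a plain object mapping each host name to the
array of its components with stale configs: the view counts one host per key and sums
the array lengths. A minimal sketch of that counting under this assumption (host names
are hypothetical):

    // Assumed shape: host name -> components needing restart on that host.
    var hc = {
      'c6401.ambari.apache.org': ['DATANODE', 'NODEMANAGER'],
      'c6402.ambari.apache.org': ['DATANODE']
    };
    var hostsCount = 0;
    var componentsCount = 0;
    for (var host in hc) {
      hostsCount++;                       // -> 2 hosts
      componentsCount += hc[host].length; // -> 3 components in total
    }
    // The Summary bar would then read "... 3 Components on 2 Hosts".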


[11/27] git commit: AMBARI-7289 - Views : Delete view instance causes auto sign out

Posted by jo...@apache.org.
AMBARI-7289 - Views : Delete view instance causes auto sign out


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/002a190f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/002a190f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/002a190f

Branch: refs/heads/branch-alerts-dev
Commit: 002a190fc42752c5a695f29bfc5fb5e1ecbf0052
Parents: 02e9fdb
Author: tbeerbower <tb...@hortonworks.com>
Authored: Fri Sep 12 17:54:33 2014 -0400
Committer: tbeerbower <tb...@hortonworks.com>
Committed: Mon Sep 15 13:19:43 2014 -0400

----------------------------------------------------------------------
 .../server/controller/AmbariHandlerList.java    | 33 +++++++++++++++++++-
 1 file changed, 32 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/002a190f/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java
index 6a831f8..da15a66 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java
@@ -32,6 +32,7 @@ import org.apache.ambari.view.SystemException;
 import org.apache.ambari.view.ViewContext;
 import org.eclipse.jetty.server.Handler;
 import org.eclipse.jetty.server.SessionManager;
+import org.eclipse.jetty.server.session.SessionHandler;
 import org.eclipse.jetty.servlet.FilterHolder;
 import org.eclipse.jetty.webapp.WebAppContext;
 import org.springframework.web.context.WebApplicationContext;
@@ -91,7 +92,7 @@ public class AmbariHandlerList extends FailsafeHandlerList implements ViewInstan
         context.setClassLoader(viewInstanceDefinition.getViewEntity().getClassLoader());
         context.setAttribute(ViewContext.CONTEXT_ATTRIBUTE, new ViewContextImpl(viewInstanceDefinition, viewRegistry));
 
-        context.getSessionHandler().setSessionManager(sessionManager);
+        context.setSessionHandler(new SharedSessionHandler(sessionManager));
         context.getServletContext().setAttribute(WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE, springWebAppContext);
         context.addFilter(new FilterHolder(springSecurityFilter), "/*", 1);
 
@@ -142,6 +143,7 @@ public class AmbariHandlerList extends FailsafeHandlerList implements ViewInstan
   public void removeViewInstance(ViewInstanceEntity viewInstanceDefinition) {
     Handler handler = handlerMap.get(viewInstanceDefinition);
     if (handler != null) {
+      handlerMap.remove(viewInstanceDefinition);
       removeHandler(handler);
     }
   }
@@ -181,4 +183,33 @@ public class AmbariHandlerList extends FailsafeHandlerList implements ViewInstan
      */
     public Handler create(ViewInstanceEntity viewInstanceDefinition, String webApp, String contextPath);
   }
+
+
+  // ----- inner class : SharedSessionHandler --------------------------------
+
+  /**
+   * A session handler that shares its session manager with another app.
+   * This handler DOES NOT attempt to stop the shared session manager.
+   */
+  private static class SharedSessionHandler extends SessionHandler {
+
+    // ----- Constructors ----------------------------------------------------
+
+    /**
+     * Construct a SharedSessionHandler.
+     *
+     * @param manager  the shared session manager.
+     */
+    public SharedSessionHandler(SessionManager manager) {
+      super(manager);
+    }
+
+
+    // ----- SessionHandler --------------------------------------------------
+
+    @Override
+    protected void doStop() throws Exception {
+      // do nothing...
+    }
+  }
 }


[16/27] AMBARI-7296. HCatalog and WebHCat services should not be managed as separate service (should be part of Hive service) (jaimin)

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/alerts/alerts.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/alerts/alerts.json b/ambari-web/app/assets/data/alerts/alerts.json
index bcf804f..4ce39ea 100644
--- a/ambari-web/app/assets/data/alerts/alerts.json
+++ b/ambari-web/app/assets/data/alerts/alerts.json
@@ -5,7 +5,7 @@
       "href" : "http://ec2-54-234-53-225.compute-1.amazonaws.com:8080/api/v1/clusters/test/hosts/ip-10-191-202-42.ec2.internal/host_components/NAGIOS_SERVER",
       "HostRoles" : {
         "cluster_name" : "test",
-        "nagios_alerts" : {"alerts":[{"service_description":"Ganglia [gmetad] Process down","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"1","plugin_output":"TCP OK - 0.004 second response time on port 8651","last_hard_state_change":"1359058506","last_hard_state":"0","last_time_ok":"1359072006","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072006","service_type":"GANGLIA"},{"service_description":"Ganglia collector [gmond] Process down alert for hbasemaster","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.009 second response time on port 8663","last_hard_state_change":"1359058517","last_hard_state":"0","last_time_ok":"1359072002","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072002","service_type":"GANGLIA"},{"service_description":"Ganglia collector [gmond] P
 rocess down alert for jobtracker","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.009 second response time on port 8662","last_hard_state_change":"1359058528","last_hard_state":"0","last_time_ok":"1359072013","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072013","service_type":"GANGLIA"},{"service_description":"Ganglia collector [gmond] Process down alert for namenode","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.004 second response time on port 8661","last_hard_state_change":"1359058540","last_hard_state":"0","last_time_ok":"1359072010","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072010","service_type":"GANGLIA"},{"service_description":"Ganglia collector [gmond] Process down alert for slaves","host_name":"ip-10-191-202-42.e
 c2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.009 second response time on port 8660","last_hard_state_change":"1359058551","last_hard_state":"0","last_time_ok":"1359072006","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072006","service_type":"GANGLIA"},{"service_description":"Percent region servers down","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: total:&lt;1&gt;, affected:&lt;0&gt;","last_hard_state_change":"1359058802","last_hard_state":"0","last_time_ok":"1359072002","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"1359058772","is_flapping":"0","last_check":"1359072002","service_type":"HBASE"},{"service_description":"HBase Web UI down","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: Successfully accessed hbase Web UI","last_hard_state_change
 ":"1359058574","last_hard_state":"0","last_time_ok":"1359072014","last_time_warning":"1359058634","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072014","service_type":"HBASE"},{"service_description":"HBaseMaster CPU utilization","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"2 CPU, average load 14.0% &lt; 200% : OK","last_hard_state_change":"1359058585","last_hard_state":"0","last_time_ok":"1359071785","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071785","service_type":"HBASE"},{"service_description":"HBaseMaster Process down","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.000 second response time on port 60000","last_hard_state_change":"1359058596","last_hard_state":"0","last_time_ok":"1359071991","last_time_warning":"0","last_time_unknown":"0","last_time_crit
 ical":"1359058626","is_flapping":"0","last_check":"1359071991","service_type":"HBASE"},{"service_description":"Corrupt\\/Missing blocks","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: corrupt_blocks:&lt;0&gt;, missing_blocks:&lt;0&gt;, total_blocks:&lt;249&gt;","last_hard_state_change":"1359058509","last_hard_state":"0","last_time_ok":"1359071949","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071949","service_type":"HDFS"},{"service_description":"HDFS Capacity utilization","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: DFSUsedGB:&lt;0.1&gt;, DFSTotalGB:&lt;784.4&gt;","last_hard_state_change":"1359058520","last_hard_state":"0","last_time_ok":"1359071720","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071720","service_type":"HDFS"},{"service_de
 scription":"Namenode RPC Latency","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: RpcQueueTime_avg_time:&lt;0&gt; Secs, RpcProcessingTime_avg_time:&lt;0&gt; Secs","last_hard_state_change":"1359058531","last_hard_state":"0","last_time_ok":"1359071731","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071731","service_type":"HDFS"},{"service_description":"Percent DataNodes down","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: total:&lt;1&gt;, affected:&lt;0&gt;","last_hard_state_change":"1359058543","last_hard_state":"0","last_time_ok":"1359072013","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072013","service_type":"HDFS"},{"service_description":"Percent DataNodes storage full","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","curr
 ent_state":"0","plugin_output":"OK: total:&lt;1&gt;, affected:&lt;0&gt;","last_hard_state_change":"1359058554","last_hard_state":"0","last_time_ok":"1359071994","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071994","service_type":"HDFS"},{"service_description":"Percent TaskTrackers down","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: total:&lt;1&gt;, affected:&lt;0&gt;","last_hard_state_change":"1359058745","last_hard_state":"0","last_time_ok":"1359072005","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"1359058715","is_flapping":"0","last_check":"1359072005","service_type":"MAPREDUCE"},{"service_description":"Nagios status log staleness","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"NAGIOS OK: 2 processes, status log updated 9 seconds ago","last_hard_state_change":"1359058576","last_h
 ard_state":"0","last_time_ok":"1359071776","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071776","service_type":"UNKNOWN"},{"service_description":"Namenode Edit logs directory status","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: All Namenode directories are active","last_hard_state_change":"1359058588","last_hard_state":"0","last_time_ok":"1359071998","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071998","service_type":"HDFS"},{"service_description":"Namenode Host CPU utilization","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"2 CPU, average load 14.5% &lt; 200% : OK","last_hard_state_change":"1359058599","last_hard_state":"0","last_time_ok":"1359071799","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0"
 ,"last_check":"1359071799","service_type":"HDFS"},{"service_description":"Namenode Process down","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.001 second response time on port 8020","last_hard_state_change":"1359058511","last_hard_state":"0","last_time_ok":"1359071996","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"1359058511","is_flapping":"0","last_check":"1359071996","service_type":"HDFS"},{"service_description":"Namenode Web UI down","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: Successfully accessed namenode Web UI","last_hard_state_change":"1359058523","last_hard_state":"0","last_time_ok":"1359071963","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071963","service_type":"HDFS"},{"service_description":"ZKSERVERS Process down","host_name":"ip-10-191-202-42.ec2.internal
 ","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.003 second response time on port 2181","last_hard_state_change":"1359058654","last_hard_state":"0","last_time_ok":"1359071974","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"1359058594","is_flapping":"0","last_check":"1359071974","service_type":"UNKNOWN"},{"service_description":"Percent zookeeper servers down","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: total:&lt;3&gt;, affected:&lt;0&gt;","last_hard_state_change":"1359058545","last_hard_state":"0","last_time_ok":"1359072015","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072015","service_type":"ZOOKEEPER"},{"service_description":"HIVE-METASTORE status check","host_name":"ip-10-12-194-214.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: Hive metaserver status OK","last_hard_state_change":"
 1359058677","last_hard_state":"0","last_time_ok":"1359071997","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"1359058647","is_flapping":"0","last_check":"1359071997","service_type":"HIVE"},{"service_description":"JobHistory Web UI down","host_name":"ip-10-12-194-214.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: Successfully accessed jobhistory Web UI","last_hard_state_change":"1359058568","last_hard_state":"0","last_time_ok":"1359072008","last_time_warning":"1359058628","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072008","service_type":"MAPREDUCE"},{"service_description":"JobTracker Web UI down","host_name":"ip-10-12-194-214.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: Successfully accessed jobtracker Web UI","last_hard_state_change":"1359058579","last_hard_state":"0","last_time_ok":"1359071959","last_time_warning":"1359058639","last_time_unknown":"0","last_t
 ime_critical":"0","is_flapping":"0","last_check":"1359071959","service_type":"MAPREDUCE"},{"service_description":"Jobtracker CPU utilization","host_name":"ip-10-12-194-214.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"2 CPU, average load 2.5% &lt; 200% : OK","last_hard_state_change":"1359058591","last_hard_state":"0","last_time_ok":"1359071791","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071791","service_type":"MAPREDUCE"},{"service_description":"Jobtracker Process down","host_name":"ip-10-12-194-214.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.064 second response time on port 50030","last_hard_state_change":"1359058677","last_hard_state":"0","last_time_ok":"1359071997","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"1359058647","is_flapping":"0","last_check":"1359071997","service_type":"MAPREDUCE"},{"service_description":"JobTracke
 r RPC Latency","host_name":"ip-10-12-194-214.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: RpcQueueTime_avg_time:&lt;0.2&gt; Secs, RpcProcessingTime_avg_time:&lt;0.24&gt; Secs","last_hard_state_change":"1359058514","last_hard_state":"0","last_time_ok":"1359072014","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072014","service_type":"MAPREDUCE"},{"service_description":"Oozie status check","host_name":"ip-10-12-194-214.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: Oozie server status [System mode: NORMAL]","last_hard_state_change":"1359058826","last_hard_state":"0","last_time_ok":"1359071966","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"1359058766","is_flapping":"0","last_check":"1359071966","service_type":"OOZIE"},{"service_description":"WEBHCAT status check","host_name":"ip-10-12-194-214.ec2.internal","current_attempt":"1","current_sta
 te":"0","plugin_output":"","last_hard_state_change":"1359058897","last_hard_state":"0","last_time_ok":"1359071977","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"1359058837","is_flapping":"","last_check":"","service_type":"WEBHCAT"},{"service_description":"ZKSERVERS Process down","host_name":"ip-10-12-194-214.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.001 second response time on port 2181","last_hard_state_change":"1359058548","last_hard_state":"0","last_time_ok":"1359071988","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071988","service_type":"UNKNOWN"},{"service_description":"ZKSERVERS Process down","host_name":"ip-10-204-141-167.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.031 second response time on port 2181","last_hard_state_change":"1359058559","last_hard_state":"0","last_time_ok":"1359071999","last_time_warning"
 :"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071999","service_type":"UNKNOWN"},{"service_description":"Process down","host_name":"ip-10-80-69-221.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.127 second response time on port 50010","last_hard_state_change":"1359058571","last_hard_state":"0","last_time_ok":"1359072011","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072011","service_type":"UNKNOWN"},{"service_description":"Storage full","host_name":"ip-10-80-69-221.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: Capacity:[], Remaining Capacity:[], percent_full:[0]","last_hard_state_change":"1359058582","last_hard_state":"0","last_time_ok":"1359071782","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071782","service_type":"UNKNOWN"},{"service_description"
 :"Process down","host_name":"ip-10-80-69-221.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.001 second response time on port 60020","last_hard_state_change":"1359058773","last_hard_state":"0","last_time_ok":"1359071973","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"1359058713","is_flapping":"0","last_check":"1359071973","service_type":"UNKNOWN"},{"service_description":"Process down","host_name":"ip-10-80-69-221.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.001 second response time on port 50060","last_hard_state_change":"1359058725","last_hard_state":"0","last_time_ok":"1359071985","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"1359058665","is_flapping":"0","last_check":"1359071985","service_type":"UNKNOWN"}],"hostcounts":{"up_hosts":4,"down_hosts":0},"servicestates":{"PUPPET":0,"HBASE":"0","HDFS":"0","ZOOKEEPER":0,"HIVE-METASTORE":"0","MAPREDUCE":"0","OOZIE":"0"
 }},
+        "nagios_alerts" : {"alerts":[{"service_description":"Ganglia [gmetad] Process down","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"1","plugin_output":"TCP OK - 0.004 second response time on port 8651","last_hard_state_change":"1359058506","last_hard_state":"0","last_time_ok":"1359072006","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072006","service_type":"GANGLIA"},{"service_description":"Ganglia collector [gmond] Process down alert for hbasemaster","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.009 second response time on port 8663","last_hard_state_change":"1359058517","last_hard_state":"0","last_time_ok":"1359072002","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072002","service_type":"GANGLIA"},{"service_description":"Ganglia collector [gmond] P
 rocess down alert for jobtracker","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.009 second response time on port 8662","last_hard_state_change":"1359058528","last_hard_state":"0","last_time_ok":"1359072013","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072013","service_type":"GANGLIA"},{"service_description":"Ganglia collector [gmond] Process down alert for namenode","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.004 second response time on port 8661","last_hard_state_change":"1359058540","last_hard_state":"0","last_time_ok":"1359072010","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072010","service_type":"GANGLIA"},{"service_description":"Ganglia collector [gmond] Process down alert for slaves","host_name":"ip-10-191-202-42.e
 c2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.009 second response time on port 8660","last_hard_state_change":"1359058551","last_hard_state":"0","last_time_ok":"1359072006","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072006","service_type":"GANGLIA"},{"service_description":"Percent region servers down","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: total:&lt;1&gt;, affected:&lt;0&gt;","last_hard_state_change":"1359058802","last_hard_state":"0","last_time_ok":"1359072002","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"1359058772","is_flapping":"0","last_check":"1359072002","service_type":"HBASE"},{"service_description":"HBase Web UI down","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: Successfully accessed hbase Web UI","last_hard_state_change
 ":"1359058574","last_hard_state":"0","last_time_ok":"1359072014","last_time_warning":"1359058634","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072014","service_type":"HBASE"},{"service_description":"HBaseMaster CPU utilization","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"2 CPU, average load 14.0% &lt; 200% : OK","last_hard_state_change":"1359058585","last_hard_state":"0","last_time_ok":"1359071785","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071785","service_type":"HBASE"},{"service_description":"HBaseMaster Process down","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.000 second response time on port 60000","last_hard_state_change":"1359058596","last_hard_state":"0","last_time_ok":"1359071991","last_time_warning":"0","last_time_unknown":"0","last_time_crit
 ical":"1359058626","is_flapping":"0","last_check":"1359071991","service_type":"HBASE"},{"service_description":"Corrupt\\/Missing blocks","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: corrupt_blocks:&lt;0&gt;, missing_blocks:&lt;0&gt;, total_blocks:&lt;249&gt;","last_hard_state_change":"1359058509","last_hard_state":"0","last_time_ok":"1359071949","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071949","service_type":"HDFS"},{"service_description":"HDFS Capacity utilization","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: DFSUsedGB:&lt;0.1&gt;, DFSTotalGB:&lt;784.4&gt;","last_hard_state_change":"1359058520","last_hard_state":"0","last_time_ok":"1359071720","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071720","service_type":"HDFS"},{"service_de
 scription":"Namenode RPC Latency","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: RpcQueueTime_avg_time:&lt;0&gt; Secs, RpcProcessingTime_avg_time:&lt;0&gt; Secs","last_hard_state_change":"1359058531","last_hard_state":"0","last_time_ok":"1359071731","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071731","service_type":"HDFS"},{"service_description":"Percent DataNodes down","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: total:&lt;1&gt;, affected:&lt;0&gt;","last_hard_state_change":"1359058543","last_hard_state":"0","last_time_ok":"1359072013","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072013","service_type":"HDFS"},{"service_description":"Percent DataNodes storage full","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","curr
 ent_state":"0","plugin_output":"OK: total:&lt;1&gt;, affected:&lt;0&gt;","last_hard_state_change":"1359058554","last_hard_state":"0","last_time_ok":"1359071994","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071994","service_type":"HDFS"},{"service_description":"Percent TaskTrackers down","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: total:&lt;1&gt;, affected:&lt;0&gt;","last_hard_state_change":"1359058745","last_hard_state":"0","last_time_ok":"1359072005","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"1359058715","is_flapping":"0","last_check":"1359072005","service_type":"MAPREDUCE"},{"service_description":"Nagios status log staleness","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"NAGIOS OK: 2 processes, status log updated 9 seconds ago","last_hard_state_change":"1359058576","last_h
 ard_state":"0","last_time_ok":"1359071776","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071776","service_type":"UNKNOWN"},{"service_description":"Namenode Edit logs directory status","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: All Namenode directories are active","last_hard_state_change":"1359058588","last_hard_state":"0","last_time_ok":"1359071998","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071998","service_type":"HDFS"},{"service_description":"Namenode Host CPU utilization","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"2 CPU, average load 14.5% &lt; 200% : OK","last_hard_state_change":"1359058599","last_hard_state":"0","last_time_ok":"1359071799","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0"
 ,"last_check":"1359071799","service_type":"HDFS"},{"service_description":"Namenode Process down","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.001 second response time on port 8020","last_hard_state_change":"1359058511","last_hard_state":"0","last_time_ok":"1359071996","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"1359058511","is_flapping":"0","last_check":"1359071996","service_type":"HDFS"},{"service_description":"Namenode Web UI down","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: Successfully accessed namenode Web UI","last_hard_state_change":"1359058523","last_hard_state":"0","last_time_ok":"1359071963","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071963","service_type":"HDFS"},{"service_description":"ZKSERVERS Process down","host_name":"ip-10-191-202-42.ec2.internal
 ","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.003 second response time on port 2181","last_hard_state_change":"1359058654","last_hard_state":"0","last_time_ok":"1359071974","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"1359058594","is_flapping":"0","last_check":"1359071974","service_type":"UNKNOWN"},{"service_description":"Percent zookeeper servers down","host_name":"ip-10-191-202-42.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: total:&lt;3&gt;, affected:&lt;0&gt;","last_hard_state_change":"1359058545","last_hard_state":"0","last_time_ok":"1359072015","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072015","service_type":"ZOOKEEPER"},{"service_description":"HIVE-METASTORE status check","host_name":"ip-10-12-194-214.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: Hive metaserver status OK","last_hard_state_change":"
 1359058677","last_hard_state":"0","last_time_ok":"1359071997","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"1359058647","is_flapping":"0","last_check":"1359071997","service_type":"HIVE"},{"service_description":"JobHistory Web UI down","host_name":"ip-10-12-194-214.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: Successfully accessed jobhistory Web UI","last_hard_state_change":"1359058568","last_hard_state":"0","last_time_ok":"1359072008","last_time_warning":"1359058628","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072008","service_type":"MAPREDUCE"},{"service_description":"JobTracker Web UI down","host_name":"ip-10-12-194-214.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: Successfully accessed jobtracker Web UI","last_hard_state_change":"1359058579","last_hard_state":"0","last_time_ok":"1359071959","last_time_warning":"1359058639","last_time_unknown":"0","last_t
 ime_critical":"0","is_flapping":"0","last_check":"1359071959","service_type":"MAPREDUCE"},{"service_description":"Jobtracker CPU utilization","host_name":"ip-10-12-194-214.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"2 CPU, average load 2.5% &lt; 200% : OK","last_hard_state_change":"1359058591","last_hard_state":"0","last_time_ok":"1359071791","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071791","service_type":"MAPREDUCE"},{"service_description":"Jobtracker Process down","host_name":"ip-10-12-194-214.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.064 second response time on port 50030","last_hard_state_change":"1359058677","last_hard_state":"0","last_time_ok":"1359071997","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"1359058647","is_flapping":"0","last_check":"1359071997","service_type":"MAPREDUCE"},{"service_description":"JobTracke
 r RPC Latency","host_name":"ip-10-12-194-214.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: RpcQueueTime_avg_time:&lt;0.2&gt; Secs, RpcProcessingTime_avg_time:&lt;0.24&gt; Secs","last_hard_state_change":"1359058514","last_hard_state":"0","last_time_ok":"1359072014","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072014","service_type":"MAPREDUCE"},{"service_description":"Oozie status check","host_name":"ip-10-12-194-214.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: Oozie server status [System mode: NORMAL]","last_hard_state_change":"1359058826","last_hard_state":"0","last_time_ok":"1359071966","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"1359058766","is_flapping":"0","last_check":"1359071966","service_type":"OOZIE"},{"service_description":"ZKSERVERS Process down","host_name":"ip-10-12-194-214.ec2.internal","current_attempt":"1","current_s
 tate":"0","plugin_output":"TCP OK - 0.001 second response time on port 2181","last_hard_state_change":"1359058548","last_hard_state":"0","last_time_ok":"1359071988","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071988","service_type":"UNKNOWN"},{"service_description":"ZKSERVERS Process down","host_name":"ip-10-204-141-167.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.031 second response time on port 2181","last_hard_state_change":"1359058559","last_hard_state":"0","last_time_ok":"1359071999","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071999","service_type":"UNKNOWN"},{"service_description":"Process down","host_name":"ip-10-80-69-221.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.127 second response time on port 50010","last_hard_state_change":"1359058571","last_hard_state":"0","last
 _time_ok":"1359072011","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359072011","service_type":"UNKNOWN"},{"service_description":"Storage full","host_name":"ip-10-80-69-221.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"OK: Capacity:[], Remaining Capacity:[], percent_full:[0]","last_hard_state_change":"1359058582","last_hard_state":"0","last_time_ok":"1359071782","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"0","is_flapping":"0","last_check":"1359071782","service_type":"UNKNOWN"},{"service_description":"Process down","host_name":"ip-10-80-69-221.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.001 second response time on port 60020","last_hard_state_change":"1359058773","last_hard_state":"0","last_time_ok":"1359071973","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"1359058713","is_flapping":"0","last_check":"135907197
 3","service_type":"UNKNOWN"},{"service_description":"Process down","host_name":"ip-10-80-69-221.ec2.internal","current_attempt":"1","current_state":"0","plugin_output":"TCP OK - 0.001 second response time on port 50060","last_hard_state_change":"1359058725","last_hard_state":"0","last_time_ok":"1359071985","last_time_warning":"0","last_time_unknown":"0","last_time_critical":"1359058665","is_flapping":"0","last_check":"1359071985","service_type":"UNKNOWN"}],"hostcounts":{"up_hosts":4,"down_hosts":0},"servicestates":{"PUPPET":0,"HBASE":"0","HDFS":"0","ZOOKEEPER":0,"HIVE-METASTORE":"0","MAPREDUCE":"0","OOZIE":"0"}},
         "component_name" : "NAGIOS_SERVER",
         "host_name" : "ip-10-191-202-42.ec2.internal"
       },

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/dashboard/services.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/dashboard/services.json b/ambari-web/app/assets/data/dashboard/services.json
index 9f7ec8a..b1dbc8e 100644
--- a/ambari-web/app/assets/data/dashboard/services.json
+++ b/ambari-web/app/assets/data/dashboard/services.json
@@ -1056,42 +1056,6 @@
       ]
     },
     {
-      "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/WEBHCAT",
-      "ServiceInfo" : {
-        "cluster_name" : "cl1",
-        "service_name" : "WEBHCAT"
-      },
-      "components" : [
-        {
-          "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/WEBHCAT/components/WEBHCAT_SERVER",
-          "ServiceComponentInfo" : {
-            "cluster_name" : "cl1",
-            "desired_configs" : { },
-            "state" : "STARTED",
-            "component_name" : "WEBHCAT_SERVER",
-            "service_name" : "WEBHCAT"
-          },
-          "host_components" : [
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/domU-12-31-39-0E-E6-01.compute-1.internal/host_components/WEBHCAT_SERVER",
-              "HostRoles" : {
-                "configs" : {
-                  "global" : "version1",
-                  "webhcat-site" : "version1"
-                },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "STARTED",
-                "state" : "STARTED",
-                "component_name" : "WEBHCAT_SERVER",
-                "host_name" : "domU-12-31-39-0E-E6-01.compute-1.internal"
-              }
-            }
-          ]
-        }
-      ]
-    },
-    {
       "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/HUE",
       "ServiceInfo" : {
         "cluster_name" : "cl1",
@@ -1506,87 +1470,6 @@
       ]
     },
     {
-      "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/HCATALOG",
-      "ServiceInfo" : {
-        "cluster_name" : "cl1",
-        "service_name" : "HCATALOG"
-      },
-      "components" : [
-        {
-          "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/HCATALOG/components/HCAT",
-          "ServiceComponentInfo" : {
-            "cluster_name" : "cl1",
-            "desired_configs" : { },
-            "state" : "INSTALLED",
-            "component_name" : "HCAT",
-            "service_name" : "HCATALOG"
-          },
-          "host_components" : [
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/domU-12-31-39-16-48-4B.compute-1.internal/host_components/HCAT",
-              "HostRoles" : {
-                "configs" : { },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "INSTALLED",
-                "state" : "INSTALLED",
-                "component_name" : "HCAT",
-                "host_name" : "domU-12-31-39-16-48-4B.compute-1.internal"
-              }
-            },
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/domU-12-31-39-0E-E6-01.compute-1.internal/host_components/HCAT",
-              "HostRoles" : {
-                "configs" : { },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "INSTALLED",
-                "state" : "INSTALLED",
-                "component_name" : "HCAT",
-                "host_name" : "domU-12-31-39-0E-E6-01.compute-1.internal"
-              }
-            },
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/ip-10-110-79-42.ec2.internal/host_components/HCAT",
-              "HostRoles" : {
-                "configs" : { },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "INSTALLED",
-                "state" : "INSTALLED",
-                "component_name" : "HCAT",
-                "host_name" : "ip-10-110-79-42.ec2.internal"
-              }
-            },
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/ip-10-110-38-164.ec2.internal/host_components/HCAT",
-              "HostRoles" : {
-                "configs" : { },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "INSTALLED",
-                "state" : "INSTALLED",
-                "component_name" : "HCAT",
-                "host_name" : "ip-10-110-38-164.ec2.internal"
-              }
-            },
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/ip-10-191-202-42.ec2.internal/host_components/HCAT",
-              "HostRoles" : {
-                "configs" : { },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "INSTALLED",
-                "state" : "INSTALLED",
-                "component_name" : "HCAT",
-                "host_name" : "ip-10-191-202-42.ec2.internal"
-              }
-            }
-          ]
-        }
-      ]
-    },
-    {
       "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/ZOOKEEPER",
       "ServiceInfo" : {
         "cluster_name" : "cl1",

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/hosts/HDP2/hosts.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/hosts/HDP2/hosts.json b/ambari-web/app/assets/data/hosts/HDP2/hosts.json
index 2533488..0e2e3fc 100644
--- a/ambari-web/app/assets/data/hosts/HDP2/hosts.json
+++ b/ambari-web/app/assets/data/hosts/HDP2/hosts.json
@@ -115,15 +115,6 @@
         },
         {
           "HostRoles" : {
-            "component_name" : "HCAT",
-            "maintenance_state" : "OFF",
-            "service_name" : "HCATALOG",
-            "stale_configs" : false,
-            "state" : "INSTALL_FAILED"
-          }
-        },
-        {
-          "HostRoles" : {
             "component_name" : "HDFS_CLIENT",
             "maintenance_state" : "OFF",
             "service_name" : "HDFS",
@@ -315,7 +306,7 @@
           "HostRoles" : {
             "component_name" : "WEBHCAT_SERVER",
             "maintenance_state" : "OFF",
-            "service_name" : "WEBHCAT",
+            "service_name" : "HIVE",
             "stale_configs" : false,
             "state" : "INSTALL_FAILED"
           }

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/services/HDP2/components_state.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/services/HDP2/components_state.json b/ambari-web/app/assets/data/services/HDP2/components_state.json
index 900737f..cbcb149 100644
--- a/ambari-web/app/assets/data/services/HDP2/components_state.json
+++ b/ambari-web/app/assets/data/services/HDP2/components_state.json
@@ -38,15 +38,6 @@
     },
     {
       "ServiceComponentInfo" : {
-        "component_name" : "HCAT",
-        "installed_count" : 1,
-        "service_name" : "HCATALOG",
-        "started_count" : 0,
-        "total_count" : 1
-      }
-    },
-    {
-      "ServiceComponentInfo" : {
         "component_name" : "DATANODE",
         "installed_count" : 0,
         "service_name" : "HDFS",

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/services/HDP2/services.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/services/HDP2/services.json b/ambari-web/app/assets/data/services/HDP2/services.json
index 3cb544d..c10dbb6 100644
--- a/ambari-web/app/assets/data/services/HDP2/services.json
+++ b/ambari-web/app/assets/data/services/HDP2/services.json
@@ -50,22 +50,6 @@
       }
     },
     {
-      "href" : "http://192.168.56.101:8080/api/v1/clusters/cl/services/HCATALOG",
-      "ServiceInfo" : {
-        "maintenance_state" : "OFF",
-        "cluster_name" : "cl",
-        "service_name" : "HCATALOG",
-        "state": "STARTED"
-      },
-      "alerts" : {
-        "summary" : {
-          "CRITICAL" : 0,
-          "OK" : 0,
-          "WARNING" : 0
-        }
-      }
-    },
-    {
       "href" : "http://192.168.56.101:8080/api/v1/clusters/cl/services/HDFS",
       "ServiceInfo" : {
         "maintenance_state" : "OFF",

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/services/host_component_actual_configs.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/services/host_component_actual_configs.json b/ambari-web/app/assets/data/services/host_component_actual_configs.json
index ef80049..8956fee 100644
--- a/ambari-web/app/assets/data/services/host_component_actual_configs.json
+++ b/ambari-web/app/assets/data/services/host_component_actual_configs.json
@@ -1078,42 +1078,7 @@
         }
       ]
     },
-    {
-      "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/WEBHCAT",
-      "ServiceInfo" : {
-        "cluster_name" : "cl1",
-        "service_name" : "WEBHCAT"
-      },
-      "components" : [
-        {
-          "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/WEBHCAT/components/WEBHCAT_SERVER",
-          "ServiceComponentInfo" : {
-            "cluster_name" : "cl1",
-            "desired_configs" : { },
-            "state" : "STARTED",
-            "component_name" : "WEBHCAT_SERVER",
-            "service_name" : "WEBHCAT"
-          },
-          "host_components" : [
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/domU-12-31-39-0E-E6-01.compute-1.internal/host_components/WEBHCAT_SERVER",
-              "HostRoles" : {
-                "configs" : {
-                  "global" : "version1",
-                  "webhcat-site" : "version1"
-                },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "STARTED",
-                "state" : "STARTED",
-                "component_name" : "WEBHCAT_SERVER",
-                "host_name" : "domU-12-31-39-0E-E6-01.compute-1.internal"
-              }
-            }
-          ]
-        }
-      ]
-    },
+
     {
       "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/HUE",
       "ServiceInfo" : {
@@ -1457,87 +1422,6 @@
       ]
     },
     {
-      "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/HCATALOG",
-      "ServiceInfo" : {
-        "cluster_name" : "cl1",
-        "service_name" : "HCATALOG"
-      },
-      "components" : [
-        {
-          "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/HCATALOG/components/HCAT",
-          "ServiceComponentInfo" : {
-            "cluster_name" : "cl1",
-            "desired_configs" : { },
-            "state" : "INSTALLED",
-            "component_name" : "HCAT",
-            "service_name" : "HCATALOG"
-          },
-          "host_components" : [
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/domU-12-31-39-16-48-4B.compute-1.internal/host_components/HCAT",
-              "HostRoles" : {
-                "configs" : { },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "INSTALLED",
-                "state" : "INSTALLED",
-                "component_name" : "HCAT",
-                "host_name" : "domU-12-31-39-16-48-4B.compute-1.internal"
-              }
-            },
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/domU-12-31-39-0E-E6-01.compute-1.internal/host_components/HCAT",
-              "HostRoles" : {
-                "configs" : { },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "INSTALLED",
-                "state" : "INSTALLED",
-                "component_name" : "HCAT",
-                "host_name" : "domU-12-31-39-0E-E6-01.compute-1.internal"
-              }
-            },
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/ip-10-110-79-42.ec2.internal/host_components/HCAT",
-              "HostRoles" : {
-                "configs" : { },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "INSTALLED",
-                "state" : "INSTALLED",
-                "component_name" : "HCAT",
-                "host_name" : "ip-10-110-79-42.ec2.internal"
-              }
-            },
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/ip-10-110-38-164.ec2.internal/host_components/HCAT",
-              "HostRoles" : {
-                "configs" : { },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "INSTALLED",
-                "state" : "INSTALLED",
-                "component_name" : "HCAT",
-                "host_name" : "ip-10-110-38-164.ec2.internal"
-              }
-            },
-            {
-              "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/hosts/ip-10-191-202-42.ec2.internal/host_components/HCAT",
-              "HostRoles" : {
-                "configs" : { },
-                "cluster_name" : "cl1",
-                "desired_configs" : { },
-                "desired_state" : "INSTALLED",
-                "state" : "INSTALLED",
-                "component_name" : "HCAT",
-                "host_name" : "ip-10-191-202-42.ec2.internal"
-              }
-            }
-          ]
-        }
-      ]
-    },
-    {
       "href" : "http://ec2-184-73-46-113.compute-1.amazonaws.com:8080/api/v1/clusters/cl1/services/ZOOKEEPER",
       "ServiceInfo" : {
         "cluster_name" : "cl1",

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/stacks/HDP-2.1/recommendations.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/stacks/HDP-2.1/recommendations.json b/ambari-web/app/assets/data/stacks/HDP-2.1/recommendations.json
index 1b851bf..95bd778 100644
--- a/ambari-web/app/assets/data/stacks/HDP-2.1/recommendations.json
+++ b/ambari-web/app/assets/data/stacks/HDP-2.1/recommendations.json
@@ -6,7 +6,7 @@
     "stack_version": "2.1.1"
   },
   "hosts": ["dev1.hortonworks.com", "dev2.hortonworks.com", "dev3.hortonworks.com"],
-  "services": ["FALCON", "FLUME", "GANGLIA", "HBASE", "HCATALOG", "HDFS", "HIVE", "MAPREDUCE2", "NAGIOS", "OOZIE", "PIG", "SQOOP", "STORM", "TEZ", "WEBCHAT", "YARN", "ZOOKEEPER"],
+  "services": ["FALCON", "FLUME", "GANGLIA", "HBASE", "HDFS", "HIVE", "MAPREDUCE2", "NAGIOS", "OOZIE", "PIG", "SQOOP", "STORM", "TEZ", "WEBCHAT", "YARN", "ZOOKEEPER"],
   "recommendations": {
     "blueprint": {
       "configurations": {

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/stacks/HDP-2.1/recommendations_configs.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/stacks/HDP-2.1/recommendations_configs.json b/ambari-web/app/assets/data/stacks/HDP-2.1/recommendations_configs.json
index 4efe599..6dacfeb 100644
--- a/ambari-web/app/assets/data/stacks/HDP-2.1/recommendations_configs.json
+++ b/ambari-web/app/assets/data/stacks/HDP-2.1/recommendations_configs.json
@@ -1,6 +1,6 @@
 {
   "hosts": ["ab2test-5.c.pramod-thangali.internal", "ab2test-6.c.pramod-thangali.internal", "ab2test-7.c.pramod-thangali.internal"],
-  "services": ["HDFS", "MAPREDUCE2", "YARN", "TEZ", "NAGIOS", "GANGLIA", "HIVE", "HCATALOG", "WEBHCAT", "SQOOP", "OOZIE", "ZOOKEEPER", "FALCON", "STORM", "FLUME", "PIG"],
+  "services": ["HDFS", "MAPREDUCE2", "YARN", "TEZ", "NAGIOS", "GANGLIA", "HIVE", "SQOOP", "OOZIE", "ZOOKEEPER", "FALCON", "STORM", "FLUME", "PIG"],
   "recommendations": {
   "blueprint": {
     "host_groups": [

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/stacks/HDP-2.1/service_components.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/stacks/HDP-2.1/service_components.json b/ambari-web/app/assets/data/stacks/HDP-2.1/service_components.json
index 7bf0093..7c56a30 100644
--- a/ambari-web/app/assets/data/stacks/HDP-2.1/service_components.json
+++ b/ambari-web/app/assets/data/stacks/HDP-2.1/service_components.json
@@ -320,42 +320,6 @@
       ]
     },
     {
-      "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/HCATALOG",
-      "StackServices" : {
-        "comments" : "This is comment for HCATALOG service",
-        "custom_commands" : [ ],
-        "display_name" : "HCatalog",
-        "required_services" : [
-          "HIVE"
-        ],
-        "service_check_supported" : true,
-        "service_name" : "HCATALOG",
-        "service_version" : "0.12.0.2.1",
-        "stack_name" : "HDP",
-        "stack_version" : "2.1",
-        "user_name" : null,
-        "config_types" : { }
-      },
-      "serviceComponents" : [
-        {
-          "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/HCATALOG/components/HCAT",
-          "StackServiceComponents" : {
-            "cardinality" : null,
-            "component_category" : "CLIENT",
-            "component_name" : "HCAT",
-            "custom_commands" : [ ],
-            "display_name" : "HCat",
-            "is_client" : true,
-            "is_master" : false,
-            "service_name" : "HCATALOG",
-            "stack_name" : "HDP",
-            "stack_version" : "2.1"
-          },
-          "dependencies" : [ ]
-        }
-      ]
-    },
-    {
       "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/HDFS",
       "StackServices" : {
         "comments" : "Apache Hadoop Distributed File System",
@@ -558,6 +522,22 @@
           "dependencies" : [ ]
         },
         {
+          "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/HCATALOG/components/HCAT",
+          "StackServiceComponents" : {
+            "cardinality" : null,
+            "component_category" : "CLIENT",
+            "component_name" : "HCAT",
+            "custom_commands" : [ ],
+            "display_name" : "HCat",
+            "is_client" : true,
+            "is_master" : false,
+            "service_name" : "HCATALOG",
+            "stack_name" : "HDP",
+            "stack_version" : "2.1"
+          },
+          "dependencies" : [ ]
+        },
+        {
           "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/HIVE/components/HIVE_METASTORE",
           "StackServiceComponents" : {
             "cardinality" : "1",
@@ -635,6 +615,73 @@
           ]
         },
         {
+          "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER",
+          "StackServiceComponents" : {
+            "cardinality" : "1",
+            "component_category" : "MASTER",
+            "component_name" : "WEBHCAT_SERVER",
+            "custom_commands" : [ ],
+            "display_name" : "WebHCat Server",
+            "is_client" : false,
+            "is_master" : true,
+            "service_name" : "WEBHCAT",
+            "stack_name" : "HDP",
+            "stack_version" : "2.1"
+          },
+          "dependencies" : [
+            {
+              "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER/dependencies/HDFS_CLIENT",
+              "Dependencies" : {
+                "component_name" : "HDFS_CLIENT",
+                "dependent_component_name" : "WEBHCAT_SERVER",
+                "dependent_service_name" : "WEBHCAT",
+                "stack_name" : "HDP",
+                "stack_version" : "2.1"
+              }
+            },
+            {
+              "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER/dependencies/MAPREDUCE2_CLIENT",
+              "Dependencies" : {
+                "component_name" : "MAPREDUCE2_CLIENT",
+                "dependent_component_name" : "WEBHCAT_SERVER",
+                "dependent_service_name" : "WEBHCAT",
+                "stack_name" : "HDP",
+                "stack_version" : "2.1"
+              }
+            },
+            {
+              "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER/dependencies/YARN_CLIENT",
+              "Dependencies" : {
+                "component_name" : "YARN_CLIENT",
+                "dependent_component_name" : "WEBHCAT_SERVER",
+                "dependent_service_name" : "WEBHCAT",
+                "stack_name" : "HDP",
+                "stack_version" : "2.1"
+              }
+            },
+            {
+              "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER/dependencies/ZOOKEEPER_CLIENT",
+              "Dependencies" : {
+                "component_name" : "ZOOKEEPER_CLIENT",
+                "dependent_component_name" : "WEBHCAT_SERVER",
+                "dependent_service_name" : "WEBHCAT",
+                "stack_name" : "HDP",
+                "stack_version" : "2.1"
+              }
+            },
+            {
+              "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER/dependencies/ZOOKEEPER_SERVER",
+              "Dependencies" : {
+                "component_name" : "ZOOKEEPER_SERVER",
+                "dependent_component_name" : "WEBHCAT_SERVER",
+                "dependent_service_name" : "WEBHCAT",
+                "stack_name" : "HDP",
+                "stack_version" : "2.1"
+              }
+            }
+          ]
+        },
+        {
           "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/HIVE/components/MYSQL_SERVER",
           "StackServiceComponents" : {
             "cardinality" : "0-1",
@@ -1236,105 +1283,6 @@
       ]
     },
     {
-      "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT",
-      "StackServices" : {
-        "comments" : "This is comment for WEBHCAT service",
-        "custom_commands" : [ ],
-        "display_name" : "WebHCat",
-        "required_services" : [
-          "HIVE",
-          "ZOOKEEPER"
-        ],
-        "service_check_supported" : true,
-        "service_name" : "WEBHCAT",
-        "service_version" : "0.13.0.2.1",
-        "stack_name" : "HDP",
-        "stack_version" : "2.1",
-        "user_name" : null,
-        "config_types" : {
-          "webhcat-env" : {
-            "supports" : {
-              "final" : "false"
-            }
-          },
-          "webhcat-site" : {
-            "supports" : {
-              "final" : "true"
-            }
-          }
-        }
-      },
-      "serviceComponents" : [
-        {
-          "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER",
-          "StackServiceComponents" : {
-            "cardinality" : "1",
-            "component_category" : "MASTER",
-            "component_name" : "WEBHCAT_SERVER",
-            "custom_commands" : [ ],
-            "display_name" : "WebHCat Server",
-            "is_client" : false,
-            "is_master" : true,
-            "service_name" : "WEBHCAT",
-            "stack_name" : "HDP",
-            "stack_version" : "2.1"
-          },
-          "dependencies" : [
-            {
-              "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER/dependencies/HDFS_CLIENT",
-              "Dependencies" : {
-                "component_name" : "HDFS_CLIENT",
-                "dependent_component_name" : "WEBHCAT_SERVER",
-                "dependent_service_name" : "WEBHCAT",
-                "stack_name" : "HDP",
-                "stack_version" : "2.1"
-              }
-            },
-            {
-              "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER/dependencies/MAPREDUCE2_CLIENT",
-              "Dependencies" : {
-                "component_name" : "MAPREDUCE2_CLIENT",
-                "dependent_component_name" : "WEBHCAT_SERVER",
-                "dependent_service_name" : "WEBHCAT",
-                "stack_name" : "HDP",
-                "stack_version" : "2.1"
-              }
-            },
-            {
-              "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER/dependencies/YARN_CLIENT",
-              "Dependencies" : {
-                "component_name" : "YARN_CLIENT",
-                "dependent_component_name" : "WEBHCAT_SERVER",
-                "dependent_service_name" : "WEBHCAT",
-                "stack_name" : "HDP",
-                "stack_version" : "2.1"
-              }
-            },
-            {
-              "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER/dependencies/ZOOKEEPER_CLIENT",
-              "Dependencies" : {
-                "component_name" : "ZOOKEEPER_CLIENT",
-                "dependent_component_name" : "WEBHCAT_SERVER",
-                "dependent_service_name" : "WEBHCAT",
-                "stack_name" : "HDP",
-                "stack_version" : "2.1"
-              }
-            },
-            {
-              "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/WEBHCAT/components/WEBHCAT_SERVER/dependencies/ZOOKEEPER_SERVER",
-              "Dependencies" : {
-                "component_name" : "ZOOKEEPER_SERVER",
-                "dependent_component_name" : "WEBHCAT_SERVER",
-                "dependent_service_name" : "WEBHCAT",
-                "stack_name" : "HDP",
-                "stack_version" : "2.1"
-              }
-            }
-          ]
-        }
-      ]
-    },
-    {
       "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services/YARN",
       "StackServices" : {
         "comments" : "Apache Hadoop NextGen MapReduce (YARN)",

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/wizard/stack/hdp/version/1.2.0.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/wizard/stack/hdp/version/1.2.0.json b/ambari-web/app/assets/data/wizard/stack/hdp/version/1.2.0.json
index c91a038..9aaa741 100644
--- a/ambari-web/app/assets/data/wizard/stack/hdp/version/1.2.0.json
+++ b/ambari-web/app/assets/data/wizard/stack/hdp/version/1.2.0.json
@@ -247,24 +247,6 @@
       "master" : true
     }
   }, {
-    "name" : "HCATALOG",
-    "version" : "0.4.0.1-1",
-    "user" : "root",
-    "comment" : "This is comment for HCATALOG service",
-    "components" : [ {
-      "name" : "HCAT",
-      "category" : "CLIENT",
-      "client" : true,
-      "master" : false
-    } ],
-    "clientOnlyService" : true,
-    "clientComponent" : {
-      "name" : "HCAT",
-      "category" : "CLIENT",
-      "client" : true,
-      "master" : false
-    }
-  }, {
     "name" : "HIVE",
     "version" : "0.9.0.1-1",
     "user" : "root",

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/wizard/stack/hdp/version/1.2.1.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/wizard/stack/hdp/version/1.2.1.json b/ambari-web/app/assets/data/wizard/stack/hdp/version/1.2.1.json
index e267e76..18a4bea 100644
--- a/ambari-web/app/assets/data/wizard/stack/hdp/version/1.2.1.json
+++ b/ambari-web/app/assets/data/wizard/stack/hdp/version/1.2.1.json
@@ -112,17 +112,6 @@
       }
     },
     {
-      "href" : "http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/HCATALOG",
-      "StackServices" : {
-        "user_name" : "root",
-        "stack_version" : "1.2.1",
-        "service_name" : "HCATALOG",
-        "stack_name" : "HDP",
-        "comments" : "This is comment for HCATALOG service",
-        "service_version" : "0.5.0"
-      }
-    },
-    {
       "href" : "http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/HIVE",
       "StackServices" : {
         "user_name" : "root",

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/wizard/stack/hdp/version/1.3.0.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/wizard/stack/hdp/version/1.3.0.json b/ambari-web/app/assets/data/wizard/stack/hdp/version/1.3.0.json
index f313fc1..3750e3f 100644
--- a/ambari-web/app/assets/data/wizard/stack/hdp/version/1.3.0.json
+++ b/ambari-web/app/assets/data/wizard/stack/hdp/version/1.3.0.json
@@ -112,17 +112,6 @@
       }
     },
     {
-      "href" : "http://ec2-23-20-124-167.compute-1.amazonaws.com:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/HCATALOG",
-      "StackServices" : {
-        "user_name" : "root",
-        "stack_version" : "1.2.1",
-        "service_name" : "HCATALOG",
-        "stack_name" : "HDP",
-        "comments" : "This is comment for HCATALOG service",
-        "service_version" : "0.5.0"
-      }
-    },
-    {
       "href" : "http://ec2-23-20-124-167.compute-1.amazonaws.com:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/GANGLIA",
       "StackServices" : {
         "user_name" : "root",

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/wizard/stack/hdp/version/1.3.1.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/wizard/stack/hdp/version/1.3.1.json b/ambari-web/app/assets/data/wizard/stack/hdp/version/1.3.1.json
index 27631ee..20743c9 100644
--- a/ambari-web/app/assets/data/wizard/stack/hdp/version/1.3.1.json
+++ b/ambari-web/app/assets/data/wizard/stack/hdp/version/1.3.1.json
@@ -123,17 +123,6 @@
       }
     },
     {
-      "href" : "http://ec2-23-20-124-167.compute-1.amazonaws.com:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/HCATALOG",
-      "StackServices" : {
-        "user_name" : "root",
-        "stack_version" : "1.2.1",
-        "service_name" : "HCATALOG",
-        "stack_name" : "HDP",
-        "comments" : "This is comment for HCATALOG service",
-        "service_version" : "0.5.0"
-      }
-    },
-    {
       "href" : "http://ec2-23-20-124-167.compute-1.amazonaws.com:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/GANGLIA",
       "StackServices" : {
         "user_name" : "root",

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/wizard/stack/hdp/version/2.0.1.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/wizard/stack/hdp/version/2.0.1.json b/ambari-web/app/assets/data/wizard/stack/hdp/version/2.0.1.json
index 5a28464..439fae1 100644
--- a/ambari-web/app/assets/data/wizard/stack/hdp/version/2.0.1.json
+++ b/ambari-web/app/assets/data/wizard/stack/hdp/version/2.0.1.json
@@ -35,17 +35,6 @@
       }
     },
     {
-      "href" : "http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/2.0.1/stackServices/HCATALOG",
-      "StackServices" : {
-        "user_name" : "root",
-        "stack_version" : "2.0.1",
-        "service_name" : "HCATALOG",
-        "stack_name" : "HDP",
-        "comments" : "This is comment for HCATALOG service",
-        "service_version" : "0.5.0.22-1"
-      }
-    },
-    {
       "href" : "http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/2.0.1/stackServices/WEBHCAT",
       "StackServices" : {
         "user_name" : "root",

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/wizard/stack/hdp/version/2.0.5.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/wizard/stack/hdp/version/2.0.5.json b/ambari-web/app/assets/data/wizard/stack/hdp/version/2.0.5.json
index fb7d747..e1b8f67 100644
--- a/ambari-web/app/assets/data/wizard/stack/hdp/version/2.0.5.json
+++ b/ambari-web/app/assets/data/wizard/stack/hdp/version/2.0.5.json
@@ -35,17 +35,6 @@
       }
     },
     {
-      "href" : "http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/2.0.1/stackServices/HCATALOG",
-      "StackServices" : {
-        "user_name" : "root",
-        "stack_version" : "2.0.1",
-        "service_name" : "HCATALOG",
-        "stack_name" : "HDP",
-        "comments" : "This is comment for HCATALOG service",
-        "service_version" : "0.5.0.22-1"
-      }
-    },
-    {
       "href" : "http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/2.0.1/stackServices/WEBHCAT",
       "StackServices" : {
         "user_name" : "root",

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/wizard/stack/hdp/version0.1.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/wizard/stack/hdp/version0.1.json b/ambari-web/app/assets/data/wizard/stack/hdp/version0.1.json
index ca4439d..4c5ba7f 100644
--- a/ambari-web/app/assets/data/wizard/stack/hdp/version0.1.json
+++ b/ambari-web/app/assets/data/wizard/stack/hdp/version0.1.json
@@ -215,23 +215,6 @@
       "master" : true
     }
   }, {
-    "name" : "HCATALOG",
-    "version" : "1.0",
-    "user" : "root",
-    "comment" : "This is comment for HCATALOG service",
-    "components" : [ {
-      "name" : "HCAT",
-      "category" : "CLIENT",
-      "client" : true,
-      "master" : false
-    } ],
-    "clientComponent" : {
-      "name" : "HCAT",
-      "category" : "CLIENT",
-      "client" : true,
-      "master" : false
-    }
-  }, {
     "name" : "HIVE",
     "version" : "1.0",
     "user" : "root",

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/wizard/stack/hdp/version01/HCATALOG.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/wizard/stack/hdp/version01/HCATALOG.json b/ambari-web/app/assets/data/wizard/stack/hdp/version01/HCATALOG.json
deleted file mode 100644
index c6bb7b6..0000000
--- a/ambari-web/app/assets/data/wizard/stack/hdp/version01/HCATALOG.json
+++ /dev/null
@@ -1,20 +0,0 @@
-{
-  "name" : "HCATALOG",
-  "version" : "0.4.0.1-1",
-  "user" : "root",
-  "comment" : "This is comment for HCATALOG service",
-  "properties" : [ ],
-  "components" : [ {
-    "name" : "HCAT",
-    "category" : "CLIENT",
-    "client" : true,
-    "master" : false
-  } ],
-  "clientOnlyService" : true,
-  "clientComponent" : {
-    "name" : "HCAT",
-    "category" : "CLIENT",
-    "client" : true,
-    "master" : false
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/wizard/stack/hdp/version1.2.1/HCATALOG.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/wizard/stack/hdp/version1.2.1/HCATALOG.json b/ambari-web/app/assets/data/wizard/stack/hdp/version1.2.1/HCATALOG.json
deleted file mode 100644
index 957bcd3..0000000
--- a/ambari-web/app/assets/data/wizard/stack/hdp/version1.2.1/HCATALOG.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
-  "href" : "http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/HCATALOG/configurations?fields=*",
-  "items" : [ ]
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/wizard/stack/hdp/version1.3.0/HCATALOG.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/wizard/stack/hdp/version1.3.0/HCATALOG.json b/ambari-web/app/assets/data/wizard/stack/hdp/version1.3.0/HCATALOG.json
deleted file mode 100644
index 1180bad..0000000
--- a/ambari-web/app/assets/data/wizard/stack/hdp/version1.3.0/HCATALOG.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
-  "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.3.0/stackServices/HCATALOG/configurations?fields=*",
-  "items" : [ ]
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/wizard/stack/hdp/version131/HCATALOG.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/wizard/stack/hdp/version131/HCATALOG.json b/ambari-web/app/assets/data/wizard/stack/hdp/version131/HCATALOG.json
deleted file mode 100644
index 1180bad..0000000
--- a/ambari-web/app/assets/data/wizard/stack/hdp/version131/HCATALOG.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
-  "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.3.0/stackServices/HCATALOG/configurations?fields=*",
-  "items" : [ ]
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/wizard/stack/hdp/version2.0.1/HCATALOG.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/wizard/stack/hdp/version2.0.1/HCATALOG.json b/ambari-web/app/assets/data/wizard/stack/hdp/version2.0.1/HCATALOG.json
deleted file mode 100644
index 1180bad..0000000
--- a/ambari-web/app/assets/data/wizard/stack/hdp/version2.0.1/HCATALOG.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
-  "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.3.0/stackServices/HCATALOG/configurations?fields=*",
-  "items" : [ ]
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/assets/data/wizard/stack/stacks.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/wizard/stack/stacks.json b/ambari-web/app/assets/data/wizard/stack/stacks.json
index ebf9d7b..4f51def 100644
--- a/ambari-web/app/assets/data/wizard/stack/stacks.json
+++ b/ambari-web/app/assets/data/wizard/stack/stacks.json
@@ -21,17 +21,6 @@
           }
         },
         {
-          "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.3.0/stackServices/HCATALOG",
-          "StackServices" : {
-            "user_name" : "root",
-            "stack_version" : "1.3.0",
-            "service_name" : "HCATALOG",
-            "stack_name" : "HDP",
-            "service_version" : "0.5.0",
-            "comments" : "This is comment for HCATALOG service"
-          }
-        },
-        {
           "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.3.0/stackServices/MAPREDUCE",
           "StackServices" : {
             "user_name" : "mapred",
@@ -98,17 +87,6 @@
           }
         },
         {
-          "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.3.0/stackServices/WEBHCAT",
-          "StackServices" : {
-            "user_name" : "root",
-            "stack_version" : "1.3.0",
-            "service_name" : "WEBHCAT",
-            "stack_name" : "HDP",
-            "service_version" : "0.5.0",
-            "comments" : "This is comment for WEBHCAT service"
-          }
-        },
-        {
           "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.3.0/stackServices/SQOOP",
           "StackServices" : {
             "user_name" : "root",
@@ -196,17 +174,6 @@
           }
         },
         {
-          "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/HCATALOG",
-          "StackServices" : {
-            "user_name" : "root",
-            "stack_version" : "1.2.1",
-            "service_name" : "HCATALOG",
-            "stack_name" : "HDP",
-            "service_version" : "0.5.0",
-            "comments" : "This is comment for HCATALOG service"
-          }
-        },
-        {
           "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/HDFS",
           "StackServices" : {
             "user_name" : "root",
@@ -273,17 +240,6 @@
           }
         },
         {
-          "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/WEBHCAT",
-          "StackServices" : {
-            "user_name" : "root",
-            "stack_version" : "1.2.1",
-            "service_name" : "WEBHCAT",
-            "stack_name" : "HDP",
-            "service_version" : "0.5.0",
-            "comments" : "This is comment for WEBHCAT service"
-          }
-        },
-        {
           "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.2.1/stackServices/SQOOP",
           "StackServices" : {
             "user_name" : "root",
@@ -360,17 +316,6 @@
           }
         },
         {
-          "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.2.0/stackServices/HCATALOG",
-          "StackServices" : {
-            "user_name" : "root",
-            "stack_version" : "1.2.0",
-            "service_name" : "HCATALOG",
-            "stack_name" : "HDP",
-            "service_version" : "0.5.0",
-            "comments" : "This is comment for HCATALOG service"
-          }
-        },
-        {
           "href" : "http://dev.hortonworks.com:8080/api/v1/stacks2/HDP/versions/1.2.0/stackServices/WEBHCAT",
           "StackServices" : {
             "user_name" : "root",

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/controllers/main/admin/security/add/step2.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/security/add/step2.js b/ambari-web/app/controllers/main/admin/security/add/step2.js
index 4172299..df500c7 100644
--- a/ambari-web/app/controllers/main/admin/security/add/step2.js
+++ b/ambari-web/app/controllers/main/admin/security/add/step2.js
@@ -76,7 +76,7 @@ App.MainAdminSecurityAddStep2Controller = Em.Controller.extend({
       components: ['HIVE_SERVER']
     },
     {
-      serviceName: 'WEBHCAT',
+      serviceName: 'HIVE',
       configName: 'webhcatserver_host',
       components: ['WEBHCAT_SERVER']
     },
@@ -168,7 +168,7 @@ App.MainAdminSecurityAddStep2Controller = Em.Controller.extend({
       primaryName: 'HTTP/'
     },
     {
-      serviceName: 'WEBHCAT',
+      serviceName: 'HIVE',
       configName: 'webhcatserver_host',
       principalName: 'webHCat_http_principal_name',
       primaryName: 'HTTP/'
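
The entries touched above are lookup descriptors: the security wizard resolves the hosts running each listed component and writes them into the named config property, so re-keying the WebHCat rows from WEBHCAT to HIVE moves them under the Hive section without changing the mechanics. A minimal sketch of that idea for descriptors shaped like the first hunk's ({serviceName, configName, components}); hostsForComponent() is a hypothetical stand-in for Ambari's actual lookup:

function resolveMasterHosts(descriptors, hostsForComponent) {
  // For each descriptor, collect the hosts of its components under configName.
  var result = {};
  descriptors.forEach(function (d) {
    var hosts = [];
    (d.components || []).forEach(function (c) {
      hosts = hosts.concat(hostsForComponent(c));
    });
    result[d.configName] = hosts;
  });
  return result;
}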

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/controllers/main/admin/serviceAccounts_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/serviceAccounts_controller.js b/ambari-web/app/controllers/main/admin/serviceAccounts_controller.js
index f06d64e..ecb6f20 100644
--- a/ambari-web/app/controllers/main/admin/serviceAccounts_controller.js
+++ b/ambari-web/app/controllers/main/admin/serviceAccounts_controller.js
@@ -122,7 +122,7 @@ App.MainAdminServiceAccountsController = App.MainServiceInfoConfigsController.ex
     var proxyUserGroup = misc_configs.findProperty('name', 'proxyuser_group');
     //stack, with version lower than 2.1, doesn't have Falcon service
     if (proxyUserGroup) {
-      var proxyServices = ['HIVE', 'WEBHCAT', 'OOZIE', 'FALCON'];
+      var proxyServices = ['HIVE', 'OOZIE', 'FALCON'];
       var services = Em.A([]);
       proxyServices.forEach(function (serviceName) {
         var stackService = App.StackService.find(serviceName);
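
The narrowed list drops WEBHCAT because its proxyuser settings now travel with HIVE. A self-contained sketch of the surrounding loop, with the find callback standing in for App.StackService.find (assumed here to return undefined for services missing from the stack):

function installedProxyServices(find) {
  var proxyServices = ['HIVE', 'OOZIE', 'FALCON']; // WEBHCAT removed
  return proxyServices
    .map(function (name) { return find(name); })
    .filter(function (svc) { return !!svc; });
}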

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/controllers/main/service/info/configs.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/info/configs.js b/ambari-web/app/controllers/main/service/info/configs.js
index cea4630..2f79685 100644
--- a/ambari-web/app/controllers/main/service/info/configs.js
+++ b/ambari-web/app/controllers/main/service/info/configs.js
@@ -1698,7 +1698,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
   addDynamicProperties: function (configs) {
     var allConfigs = this.get('stepConfigs').findProperty('serviceName', this.get('content.serviceName')).get('configs');
     var templetonHiveProperty = allConfigs.someProperty('name', 'templeton.hive.properties');
-    if (!templetonHiveProperty && this.get('content.serviceName') === 'WEBHCAT') {
+    if (!templetonHiveProperty && this.get('content.serviceName') === 'HIVE') {
       configs.pushObject({
         "name": "templeton.hive.properties",
         "templateName": ["hivemetastore_host"],
@@ -2136,7 +2136,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
       hostProperty: 'hivemetastore_host',
       componentName: 'HIVE_SERVER',
       serviceName: 'HIVE',
-      serviceUseThis: ['WEBHCAT']
+      serviceUseThis: ['HIVE']
     },
     {
       hostProperty: 'oozieserver_host',
@@ -2160,7 +2160,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
     {
       hostProperty: 'webhcatserver_host',
       componentName: 'WEBHCAT_SERVER',
-      serviceName: 'WEBHCAT',
+      serviceName: 'HIVE',
       serviceUseThis: [],
       m: true
     },
@@ -2168,7 +2168,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
       hostProperty: 'zookeeperserver_hosts',
       componentName: 'ZOOKEEPER_SERVER',
       serviceName: 'ZOOKEEPER',
-      serviceUseThis: ['HBASE', 'WEBHCAT'],
+      serviceUseThis: ['HBASE', 'HIVE'],
       m: true
     },
     {
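
Each mapping above fans a resolved host value out to its own service and to every service named in serviceUseThis; with WebHCat merged, the former WEBHCAT consumers are now listed as HIVE. A hedged sketch of that fan-out (resolveHosts and setConfig are assumed callbacks, not the controller's real helpers):

function applyHostProperties(mappings, resolveHosts, setConfig) {
  mappings.forEach(function (m) {
    var value = resolveHosts(m.componentName); // e.g. a list of host names
    [m.serviceName].concat(m.serviceUseThis).forEach(function (svc) {
      setConfig(svc, m.hostProperty, value);
    });
  });
}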

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/controllers/main/service/item.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/item.js b/ambari-web/app/controllers/main/service/item.js
index fc62a4e..edb524c 100644
--- a/ambari-web/app/controllers/main/service/item.js
+++ b/ambari-web/app/controllers/main/service/item.js
@@ -96,7 +96,7 @@ App.MainServiceItemController = Em.Controller.extend({
   }.property('content.serviceName'),
 
   isConfigurable: function () {
-    return !App.get('services.noConfigTypes').concat('HCATALOG').contains(this.get('content.serviceName'));
+    return !App.get('services.noConfigTypes').contains(this.get('content.serviceName'));
   }.property('App.services.noConfigTypes','content.serviceName'),
 
   allHosts: [],
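
The simplified check reads: a service is configurable exactly when it is absent from the stack's noConfigTypes list; the HCATALOG special case disappears because HCATALOG is no longer a standalone service. Restated as a plain function with a hypothetical list:

function isConfigurable(noConfigTypes, serviceName) {
  return noConfigTypes.indexOf(serviceName) === -1;
}

var noConfigTypes = ['SERVICE_WITHOUT_CONFIGS']; // hypothetical
isConfigurable(noConfigTypes, 'HIVE');                    // true
isConfigurable(noConfigTypes, 'SERVICE_WITHOUT_CONFIGS'); // false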

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/controllers/wizard/step7_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step7_controller.js b/ambari-web/app/controllers/wizard/step7_controller.js
index 414dfdd..8afc87d 100644
--- a/ambari-web/app/controllers/wizard/step7_controller.js
+++ b/ambari-web/app/controllers/wizard/step7_controller.js
@@ -166,14 +166,13 @@ App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, {
 
   /**
    * List of installed service names
-   * Sqoop and HCatalog are excluded if not installer wizard running
    * @type {string[]}
    */
   installedServiceNames: function () {
     var serviceNames = this.get('content.services').filterProperty('isInstalled').mapProperty('serviceName');
     if (this.get('content.controllerName') !== 'installerController') {
       serviceNames = serviceNames.filter(function(_serviceName){
-        return !App.get('services.noConfigTypes').concat('HCATALOG').contains(_serviceName);
+        return !App.get('services.noConfigTypes').contains(_serviceName);
       });
     }
     return serviceNames;
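
Outside the installer wizard, installed services are now filtered by the stack's noConfigTypes list alone, with no HCATALOG exception. A plain-array restatement of that behavior (the Ember filterProperty/mapProperty calls are replaced with equivalent array methods):

function installedServiceNames(services, controllerName, noConfigTypes) {
  var names = services
    .filter(function (s) { return s.isInstalled; })
    .map(function (s) { return s.serviceName; });
  if (controllerName !== 'installerController') {
    names = names.filter(function (n) {
      return noConfigTypes.indexOf(n) === -1;
    });
  }
  return names;
}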

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/controllers/wizard/step8_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step8_controller.js b/ambari-web/app/controllers/wizard/step8_controller.js
index 7b7ae11..0607e9c 100644
--- a/ambari-web/app/controllers/wizard/step8_controller.js
+++ b/ambari-web/app/controllers/wizard/step8_controller.js
@@ -1601,7 +1601,6 @@ App.WizardStep8Controller = Em.Controller.extend(App.AddSecurityConfigs, {
       oozieUser = this.get('configs').someProperty('name', 'oozie_user') ? this.get('configs').findProperty('name', 'oozie_user').value : null,
       isHiveSelected = installedAndSelectedServices.someProperty('serviceName', 'HIVE'),
       hiveUser = this.get('configs').someProperty('name', 'hive_user') ? this.get('configs').findProperty('name', 'hive_user').value : null,
-      isHcatSelected = installedAndSelectedServices.someProperty('serviceName', 'WEBHCAT'),
       hcatUser = this.get('configs').someProperty('name', 'hcat_user') ? this.get('configs').findProperty('name', 'hcat_user').value : null,
       isGLUSTERFSSelected = installedAndSelectedServices.someProperty('serviceName', 'GLUSTERFS');
 
@@ -1610,7 +1609,7 @@ App.WizardStep8Controller = Em.Controller.extend(App.AddSecurityConfigs, {
       if (
         (isOozieSelected || (_coreSiteObj.name != 'hadoop.proxyuser.' + oozieUser + '.hosts' && _coreSiteObj.name != 'hadoop.proxyuser.' + oozieUser + '.groups')) &&
         (isHiveSelected || (_coreSiteObj.name != 'hadoop.proxyuser.' + hiveUser + '.hosts' && _coreSiteObj.name != 'hadoop.proxyuser.' + hiveUser + '.groups')) &&
-        (isHcatSelected || (_coreSiteObj.name != 'hadoop.proxyuser.' + hcatUser + '.hosts' && _coreSiteObj.name != 'hadoop.proxyuser.' + hcatUser + '.groups'))) {
+        (isHiveSelected || (_coreSiteObj.name != 'hadoop.proxyuser.' + hcatUser + '.hosts' && _coreSiteObj.name != 'hadoop.proxyuser.' + hcatUser + '.groups'))) {
         coreSiteProperties[_coreSiteObj.name] = App.config.escapeXMLCharacters(_coreSiteObj.value);
       }
       if (isGLUSTERFSSelected && _coreSiteObj.name == "fs.default.name") {
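
The condition above keeps a core-site proxyuser property only when the owning service is selected; folding HCatalog into Hive means the hcat_user entries are now gated on isHiveSelected as well. A generalized sketch of the same filter (the selections array shape is an assumption for illustration):

function keepCoreSiteProperty(name, selections) {
  // selections: [{ user: 'oozie', selected: true }, ...]
  return selections.every(function (s) {
    return s.selected ||
      (name !== 'hadoop.proxyuser.' + s.user + '.hosts' &&
       name !== 'hadoop.proxyuser.' + s.user + '.groups');
  });
}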

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/data/HDP2/secure_configs.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2/secure_configs.js b/ambari-web/app/data/HDP2/secure_configs.js
index da543e0..833697d 100644
--- a/ambari-web/app/data/HDP2/secure_configs.js
+++ b/ambari-web/app/data/HDP2/secure_configs.js
@@ -84,20 +84,11 @@ module.exports = [
     displayName: 'Hive',
     filename: 'hive-site',
     configCategories: [
-      App.ServiceConfigCategory.create({ name: 'Hive Metastore', displayName: 'Hive Metastore and Hive Server 2'})
-    ],
-    sites: ['hive-site'],
-    configs: configProperties.filterProperty('serviceName', 'HIVE')
-  },
-  {
-    serviceName: 'WEBHCAT',
-    displayName: 'WebHCat',
-    filename: 'webhcat-site',
-    configCategories: [
+      App.ServiceConfigCategory.create({ name: 'Hive Metastore', displayName: 'Hive Metastore and Hive Server 2'}),
       App.ServiceConfigCategory.create({ name: 'WebHCat Server', displayName : 'WebHCat Server'})
     ],
-    sites: ['webhcat-site'],
-    configs: configProperties.filterProperty('serviceName', 'WEBHCAT')
+    sites: ['hive-site','webhcat-site'],
+    configs: configProperties.filterProperty('serviceName', 'HIVE')
   },
   {
     serviceName: 'HBASE',
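
After the merge a single HIVE entry carries both hive-site and webhcat-site, so filtering the shared property list by serviceName picks up the former WebHCat rows automatically. A self-contained illustration with toy data:

var configProperties = [
  { name: 'hive.metastore.uris',       filename: 'hive-site',    serviceName: 'HIVE' },
  { name: 'templeton.kerberos.keytab', filename: 'webhcat-site', serviceName: 'HIVE' } // was WEBHCAT
];
var hiveConfigs = configProperties.filter(function (c) {
  return c.serviceName === 'HIVE';
});
// hiveConfigs now spans both config files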

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/data/HDP2/secure_mapping.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2/secure_mapping.js b/ambari-web/app/data/HDP2/secure_mapping.js
index cb92e5a..5d9261c 100644
--- a/ambari-web/app/data/HDP2/secure_mapping.js
+++ b/ambari-web/app/data/HDP2/secure_mapping.js
@@ -505,7 +505,7 @@ module.exports = [
     "foreignKey": null,
     "value": "<templateName[0]>@<templateName[1]>",
     "filename": "webhcat-site.xml",
-    "serviceName": "WEBHCAT"
+    "serviceName": "HIVE"
   },
   {
     "name": "templeton.kerberos.keytab",
@@ -513,7 +513,7 @@ module.exports = [
     "foreignKey": null,
     "value": "<templateName[0]>",
     "filename": "webhcat-site.xml",
-    "serviceName": "WEBHCAT"
+    "serviceName": "HIVE"
   },
   {
     "name": "templeton.kerberos.secret",
@@ -521,7 +521,7 @@ module.exports = [
     "foreignKey": null,
     "value": "secret",
     "filename": "webhcat-site.xml",
-    "serviceName": "WEBHCAT"
+    "serviceName": "HIVE"
   },
   {
     "name": "templeton.hive.properties",
@@ -530,7 +530,7 @@ module.exports = [
     "value": "hive.metastore.local=false,hive.metastore.uris=thrift://<templateName[0]>:9083,hive." +
       "metastore.sasl.enabled=true,hive.metastore.execute.setugi=true,hive.metastore.warehouse.dir=/apps/hive/warehouse,hive.exec.mode.local.auto=false,hive.metastore.kerberos.principal=<templateName[1]>@<templateName[2]>",
     "filename": "webhcat-site.xml",
-    "serviceName": "WEBHCAT"
+    "serviceName": "HIVE"
   },
   {
     "name": "hbase.coprocessor.master.classes",
@@ -689,7 +689,7 @@ module.exports = [
     "foreignKey": ["webHCat_http_primary_name"],
     "value": "<templateName[0]>",
     "filename": "core-site.xml",
-    "serviceName": "WEBHCAT"
+    "serviceName": "HIVE"
   },
   {
     "name": "hadoop.proxyuser.<foreignKey[0]>.hosts",
@@ -697,7 +697,7 @@ module.exports = [
     "foreignKey": ["webHCat_http_primary_name"],
     "value": "<templateName[0]>",
     "filename": "core-site.xml",
-    "serviceName": "WEBHCAT"
+    "serviceName": "HIVE"
   }
 ];
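
The <templateName[i]> and <foreignKey[i]> tokens above are positional placeholders filled in before the values land in the target XML files. A toy expander showing the substitution pattern (an assumption about the mechanism, not Ambari's implementation):

function expand(value, templateNames, foreignKeys) {
  return value
    .replace(/<templateName\[(\d+)\]>/g, function (_, i) { return templateNames[+i]; })
    .replace(/<foreignKey\[(\d+)\]>/g, function (_, i) { return foreignKeys[+i]; });
}

expand('<templateName[0]>@<templateName[1]>', ['HTTP/_HOST', 'EXAMPLE.COM'], []);
// => 'HTTP/_HOST@EXAMPLE.COM'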
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/data/HDP2/secure_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2/secure_properties.js b/ambari-web/app/data/HDP2/secure_properties.js
index a2c3b98..95d17cd 100644
--- a/ambari-web/app/data/HDP2/secure_properties.js
+++ b/ambari-web/app/data/HDP2/secure_properties.js
@@ -663,8 +663,6 @@ module.exports =
       "serviceName": "YARN",
       "category": "NodeManager"
     },
-
-  /**********************************************WEBHCAT***************************************/
     {
       "id": "puppet var",
       "name": "webhcatserver_host",
@@ -675,7 +673,7 @@ module.exports =
       "displayType": "masterHost",
       "isVisible": true,
       "isOverridable": false,
-      "serviceName": "WEBHCAT",
+      "serviceName": "HIVE",
       "category": "WebHCat Server"
     },
     {
@@ -688,7 +686,7 @@ module.exports =
       "displayType": "principal",
       "isVisible": true,
       "isOverridable": false,
-      "serviceName": "WEBHCAT",
+      "serviceName": "HIVE",
       "category": "WebHCat Server"
     },
     {
@@ -701,7 +699,7 @@ module.exports =
       "displayType": "directory",
       "isVisible": true,
       "isOverridable": false,
-      "serviceName": "WEBHCAT",
+      "serviceName": "HIVE",
       "category": "WebHCat Server"
     },
 


[07/27] git commit: AMBARI-7310. Error message on ambari-server removal.(vbrodetskyi)

Posted by jo...@apache.org.
AMBARI-7310. Error message on ambari-server removal.(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e3dba263
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e3dba263
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e3dba263

Branch: refs/heads/branch-alerts-dev
Commit: e3dba263475ab4a29c57ed041539eebbef8f16d7
Parents: 1719a5c
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Mon Sep 15 18:38:01 2014 +0300
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Mon Sep 15 18:38:01 2014 +0300

----------------------------------------------------------------------
 ambari-agent/src/main/package/rpm/preremove.sh  | 2 +-
 ambari-server/src/main/package/rpm/preremove.sh | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e3dba263/ambari-agent/src/main/package/rpm/preremove.sh
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/package/rpm/preremove.sh b/ambari-agent/src/main/package/rpm/preremove.sh
index 5fb9be2..2078acd 100644
--- a/ambari-agent/src/main/package/rpm/preremove.sh
+++ b/ambari-agent/src/main/package/rpm/preremove.sh
@@ -29,7 +29,7 @@ if [ "$1" -eq 0 ]; then  # Action is uninstall
       /var/lib/ambari-agent/install-helper.sh remove
     fi
 
-    chkconfig --del ambari-agent
+    chkconfig --list | grep ambari-agent && chkconfig --del ambari-agent
 fi
 
 exit 0

http://git-wip-us.apache.org/repos/asf/ambari/blob/e3dba263/ambari-server/src/main/package/rpm/preremove.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/package/rpm/preremove.sh b/ambari-server/src/main/package/rpm/preremove.sh
index 073c07f..12b69b6 100644
--- a/ambari-server/src/main/package/rpm/preremove.sh
+++ b/ambari-server/src/main/package/rpm/preremove.sh
@@ -34,7 +34,7 @@ if [ "$1" -eq 0 ]; then  # Action is uninstall
       /var/lib/ambari-server/install-helper.sh remove
     fi
 
-    chkconfig --del ambari-server
+    chkconfig --list | grep ambari-server && chkconfig --del ambari-server
 fi
 
 exit 0


[18/27] AMBARI-7296. HCatalog and WebHCat services should not be managed as separate services (should be part of the Hive service) (jaimin)

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
deleted file mode 100644
index c56ae5f..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
+++ /dev/null
@@ -1,112 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-from resource_management import *
-import sys
-
-
-def webhcat():
-  import params
-
-  params.HdfsDirectory(params.webhcat_apps_dir,
-                       action="create_delayed",
-                       owner=params.webhcat_user,
-                       mode=0755
-  )
-  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
-    params.HdfsDirectory(params.hcat_hdfs_user_dir,
-                         action="create_delayed",
-                         owner=params.hcat_user,
-                         mode=params.hcat_hdfs_user_mode
-    )
-  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
-                       action="create_delayed",
-                       owner=params.webhcat_user,
-                       mode=params.webhcat_hdfs_user_mode
-  )
-  params.HdfsDirectory(None, action="create")
-
-  Directory(params.templeton_pid_dir,
-            owner=params.webhcat_user,
-            mode=0755,
-            group=params.user_group,
-            recursive=True)
-
-  Directory(params.templeton_log_dir,
-            owner=params.webhcat_user,
-            mode=0755,
-            group=params.user_group,
-            recursive=True)
-
-  Directory(params.config_dir,
-            owner=params.webhcat_user,
-            group=params.user_group)
-
-  XmlConfig("webhcat-site.xml",
-            conf_dir=params.config_dir,
-            configurations=params.config['configurations']['webhcat-site'],
-            configuration_attributes=params.config['configuration_attributes']['webhcat-site'],
-            owner=params.webhcat_user,
-            group=params.user_group,
-  )
-
-  File(format("{config_dir}/webhcat-env.sh"),
-       owner=params.webhcat_user,
-       group=params.user_group,
-       content=InlineTemplate(params.webhcat_env_sh_template)
-  )
-
-  if params.security_enabled:
-    kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
-  else:
-    kinit_if_needed = ""
-
-  if kinit_if_needed:
-    Execute(kinit_if_needed,
-            user=params.webhcat_user,
-            path='/bin'
-    )
-
-  CopyFromLocal(params.hadoop_streeming_jars,
-                owner=params.webhcat_user,
-                mode=0755,
-                dest_dir=params.webhcat_apps_dir,
-                kinnit_if_needed=kinit_if_needed,
-                hdfs_user=params.hdfs_user,
-                hadoop_conf_dir=params.hadoop_conf_dir
-  )
-
-  CopyFromLocal('/usr/share/HDP-webhcat/pig.tar.gz',
-                owner=params.webhcat_user,
-                mode=0755,
-                dest_dir=params.webhcat_apps_dir,
-                kinnit_if_needed=kinit_if_needed,
-                hdfs_user=params.hdfs_user,
-                hadoop_conf_dir=params.hadoop_conf_dir
-  )
-
-  CopyFromLocal('/usr/share/HDP-webhcat/hive.tar.gz',
-                owner=params.webhcat_user,
-                mode=0755,
-                dest_dir=params.webhcat_apps_dir,
-                kinnit_if_needed=kinit_if_needed,
-                hdfs_user=params.hdfs_user,
-                hadoop_conf_dir=params.hadoop_conf_dir
-  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py
deleted file mode 100644
index 2111fa4..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-import sys
-from resource_management import *
-
-from webhcat import webhcat
-from webhcat_service import webhcat_service
-
-class WebHCatServer(Script):
-  def install(self, env):
-    self.install_packages(env)
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    webhcat()
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-    self.configure(env) # FOR SECURITY
-    webhcat_service(action = 'start')
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-
-    webhcat_service(action = 'stop')
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-    check_process_status(status_params.pid_file)
-
-if __name__ == "__main__":
-  WebHCatServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat_service.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat_service.py
deleted file mode 100644
index a92446d..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat_service.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-from resource_management import *
-
-def webhcat_service(action='start'):
-  import params
-
-  cmd = format('env HADOOP_HOME={hadoop_home} {webhcat_bin_dir}/webhcat_server.sh')
-
-  if action == 'start':
-    daemon_cmd = format('{cmd} start')
-    no_op_test = format('ls {pid_file} >/dev/null 2>&1 && ps `cat {pid_file}` >/dev/null 2>&1')
-    Execute(daemon_cmd,
-            user=params.webhcat_user,
-            not_if=no_op_test
-    )
-  elif action == 'stop':
-    daemon_cmd = format('{cmd} stop')
-    Execute(daemon_cmd,
-            user=params.webhcat_user
-    )
-    Execute(format('rm -f {pid_file}'))

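The not_if guard in the deleted start branch is what made starts idempotent: the shell test succeeds only when the pid file exists and the process it names is alive. The same check in plain Python (a sketch; the pid-file path comes from status_params in the real scripts):

    import os

    def is_running(pid_file):
        # Mirrors the shell test: ls {pid_file} && ps `cat {pid_file}`
        try:
            with open(pid_file) as f:
                pid = int(f.read().strip())
            os.kill(pid, 0)  # signal 0 probes liveness without killing
            return True
        except (EnvironmentError, ValueError):
            return False
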
http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/role_command_order.json
index 218ba8a..82cbd79 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/role_command_order.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/role_command_order.json
@@ -26,10 +26,8 @@
         "WEBHCAT_SERVER-START", "FLUME_HANDLER-START"],
     "MAPREDUCE_SERVICE_CHECK-SERVICE_CHECK": ["RESOURCEMANAGER-START", "NODEMANAGER-START"],
     "OOZIE_SERVICE_CHECK-SERVICE_CHECK": ["OOZIE_SERVER-START"],
-    "WEBHCAT_SERVICE_CHECK-SERVICE_CHECK": ["WEBHCAT_SERVER-START"],
     "HBASE_SERVICE_CHECK-SERVICE_CHECK": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START"],
-    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START"],
-    "HCAT_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START"],
+    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START", "WEBHCAT_SERVER-START"],
     "PIG_SERVICE_CHECK-SERVICE_CHECK": ["RESOURCEMANAGER-START", "NODEMANAGER-START"],
     "SQOOP_SERVICE_CHECK-SERVICE_CHECK": ["RESOURCEMANAGER-START", "NODEMANAGER-START"],
     "ZOOKEEPER_SERVICE_CHECK-SERVICE_CHECK": ["ZOOKEEPER_SERVER-START"],

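In role_command_order.json each key names a role command, and its value lists the commands that must complete first. After this change the Hive service check also waits for the WebHCat server, which is what allows the standalone WEBHCAT_SERVICE_CHECK entry to be dropped. A tiny sketch of how such a mapping answers "what blocks this command?":

    # Sketch: the dependency map, reduced to the entry changed above.
    deps = {
        "HIVE_SERVICE_CHECK-SERVICE_CHECK": [
            "HIVE_SERVER-START", "HIVE_METASTORE-START", "WEBHCAT_SERVER-START",
        ],
    }
    # Everything listed must finish before the service check may run.
    print(deps["HIVE_SERVICE_CHECK-SERVICE_CHECK"])
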
http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/HIVE/metainfo.xml
index 38280ae..77cf405 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/HIVE/metainfo.xml
@@ -23,62 +23,78 @@
       <comment>Data warehouse system for ad-hoc queries &amp; analysis of large datasets and table &amp; storage management service</comment>
       <version>0.13.0.2.1</version>
 
-      <configuration-dependencies>
-        <config-type>hive-site</config-type>
-        <config-type>hive-log4j</config-type>
-        <config-type>hive-exec-log4j</config-type>
-        <config-type>global</config-type>
-        <config-type>mapred-site</config-type>
-      </configuration-dependencies>
-    </service>
-
-    <service>
-      <name>HCATALOG</name>
-      <comment>This is the comment for the HCATALOG service</comment>
-      <version>0.12.0.2.1</version>
       <components>
         <component>
-          <name>HCAT</name>
-          <category>CLIENT</category>
-          <commandScript>
-            <script>scripts/hcat_client.py</script>
-            <scriptType>PYTHON</scriptType>
-          </commandScript>
-          <configFiles>
-            <configFile>
-              <type>xml</type>
-              <fileName>hive-site.xml</fileName>
-              <dictionaryName>hive-site</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>hive-env.sh</fileName>
-              <dictionaryName>hive-env</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>hive-log4j.properties</fileName>
-              <dictionaryName>hive-log4j</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>hive-exec-log4j.properties</fileName>
-              <dictionaryName>hive-exec-log4j</dictionaryName>
-            </configFile>
-          </configFiles>
+          <name>HIVE_SERVER</name>
+          <dependencies>
+            <dependency>
+              <name>TEZ/TEZ_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
         </component>
       </components>
+
       <osSpecifics>
         <osSpecific>
           <osFamily>any</osFamily>
           <packages>
             <package>
+              <name>hive</name>
+            </package>
+            <package>
               <name>hive-hcatalog</name>
             </package>
+            <package>
+              <name>hive-webhcat</name>
+            </package>
+            <package>
+              <name>webhcat-tar-hive</name>
+            </package>
+            <package>
+              <name>webhcat-tar-pig</name>
+            </package>
+            <package>
+              <name>mysql-connector-java</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>redhat5,redhat6,suse11</osFamily>
+          <packages>
+            <package>
+              <name>mysql</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>redhat5,redhat6,ubuntu12</osFamily>
+          <packages>
+            <package>
+              <name>mysql-server</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>suse11</osFamily>
+          <packages>
+            <package>
+              <name>mysql-client</name>
+            </package>
           </packages>
         </osSpecific>
       </osSpecifics>
-    </service>
 
+      <configuration-dependencies>
+        <config-type>hive-site</config-type>
+        <config-type>hive-log4j</config-type>
+        <config-type>hive-exec-log4j</config-type>
+        <config-type>global</config-type>
+        <config-type>mapred-site</config-type>
+      </configuration-dependencies>
+    </service>
   </services>
 </metainfo>

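The net effect of this hunk is that HIVE's metainfo now declares the HCatalog/WebHCat packages itself. To see what a stack actually pulls in per OS family, the metainfo can be inspected with the standard library (a sketch; the file path is illustrative):

    import xml.etree.ElementTree as ET

    # List declared packages grouped by osFamily for one service metainfo.
    tree = ET.parse("metainfo.xml")
    for os_spec in tree.iter("osSpecific"):
        family = os_spec.findtext("osFamily")
        packages = [p.findtext("name") for p in os_spec.iter("package")]
        print(family, packages)
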
http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/WEBHCAT/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/WEBHCAT/configuration/webhcat-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/WEBHCAT/configuration/webhcat-site.xml
deleted file mode 100644
index 0eca719..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/WEBHCAT/configuration/webhcat-site.xml
+++ /dev/null
@@ -1,143 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- 
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-<!-- The default settings for Templeton. -->
-<!-- Edit templeton-site.xml to change settings for your local -->
-<!-- install. -->
-
-<configuration supports_final="true">
-
-  <property>
-    <name>templeton.port</name>
-    <value>50111</value>
-    <description>The HTTP port for the main server.</description>
-  </property>
-
-  <property>
-    <name>templeton.hadoop.conf.dir</name>
-    <value>/etc/hadoop/conf</value>
-    <description>The path to the Hadoop configuration.</description>
-  </property>
-
-  <property>
-    <name>templeton.jar</name>
-    <value>/usr/lib/hcatalog/share/webhcat/svr/webhcat.jar</value>
-    <description>The path to the Templeton jar file.</description>
-  </property>
-
-  <property>
-    <name>templeton.libjars</name>
-    <value>/usr/lib/zookeeper/zookeeper.jar</value>
-    <description>Jars to add to the classpath.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.hadoop</name>
-    <value>/usr/bin/hadoop</value>
-    <description>The path to the Hadoop executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.archive</name>
-    <value>glusterfs:///apps/webhcat/pig.tar.gz</value>
-    <description>The path to the Pig archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.path</name>
-    <value>pig.tar.gz/pig/bin/pig</value>
-    <description>The path to the Pig executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hcat</name>
-    <value>/usr/bin/hcat</value>
-    <description>The path to the hcatalog executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.archive</name>
-    <value>glusterfs:///apps/webhcat/hive.tar.gz</value>
-    <description>The path to the Hive archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.home</name>
-    <value>hive.tar.gz/hive</value>
-    <description>The path to the Hive home within the tar. Has no effect if templeton.hive.archive is not set.</description>
-  </property>
-
-  <property>
-    <name>templeton.hcat.home</name>
-    <value>hive.tar.gz/hive/hcatalog</value>
-    <description>The path to the HCat home within the tar. Has no effect if templeton.hive.archive is not set.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.path</name>
-    <value>hive.tar.gz/hive/bin/hive</value>
-    <description>The path to the Hive executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.properties</name>
-    <value>hive.metastore.local=false, hive.metastore.uris=thrift://localhost:9933, hive.metastore.sasl.enabled=false</value>
-    <description>Properties to set when running hive.</description>
-  </property>
-
-  <property>
-    <name>templeton.zookeeper.hosts</name>
-    <value>localhost:2181</value>
-    <description>ZooKeeper servers, as comma-separated host:port pairs</description>
-  </property>
-
-  <property>
-    <name>templeton.storage.class</name>
-    <value>org.apache.hive.hcatalog.templeton.tool.ZooKeeperStorage</value>
-    <description>The class to use as storage</description>
-  </property>
-
-  <property>
-    <name>templeton.override.enabled</name>
-    <value>false</value>
-    <description>
-      Enable the override path in templeton.override.jars
-    </description>
-  </property>
-
-  <property>
-    <name>templeton.streaming.jar</name>
-    <value>glusterfs:///apps/webhcat/hadoop-streaming.jar</value>
-    <description>The GlusterFS path to the Hadoop streaming jar file.</description>
-  </property>
-
-  <property>
-    <name>templeton.exec.timeout</name>
-    <value>60000</value>
-    <description>Timeout for the Templeton API.</description>
-  </property>
-
-  <property>
-    <name>templeton.hadoop.queue.name</name>
-    <value>default</value>
-    <description>MapReduce queue name where WebHCat map-only jobs will be submitted to. Can be used to avoid a deadlock where all map slots in the cluster are taken over by Templeton launcher tasks.</description>
-  </property>
-
-</configuration>

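The deleted file is a standard Hadoop-style *-site.xml: a flat list of name/value properties. Reading one into a dict takes a few lines (a sketch usable for any of the webhcat-site.xml files in this commit):

    import xml.etree.ElementTree as ET

    def load_site_xml(path):
        # Hadoop *-site.xml -> {property name: value}
        return {p.findtext("name"): p.findtext("value")
                for p in ET.parse(path).iter("property")}
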
http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/WEBHCAT/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/WEBHCAT/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/WEBHCAT/metainfo.xml
deleted file mode 100644
index fc985f9..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/WEBHCAT/metainfo.xml
+++ /dev/null
@@ -1,46 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <comment>This is the comment for the WEBHCAT service</comment>
-      <version>0.13.0.2.1</version>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hive-webhcat</name>
-            </package>
-            <package>
-              <name>webhcat-tar-hive</name>
-            </package>
-            <package>
-              <name>webhcat-tar-pig</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      <configuration-dependencies>
-        <config-type>webhcat-site</config-type>
-      </configuration-dependencies>
-    </service>
-  </services>
-</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.1/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/2.1/role_command_order.json
index b97a31a..a958e2e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/role_command_order.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/role_command_order.json
@@ -26,9 +26,8 @@
         "WEBHCAT_SERVER-START", "FLUME_HANDLER-START"],
     "MAPREDUCE_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
     "OOZIE_SERVICE_CHECK-SERVICE_CHECK": ["OOZIE_SERVER-START"],
-    "WEBHCAT_SERVICE_CHECK-SERVICE_CHECK": ["WEBHCAT_SERVER-START"],
     "HBASE_SERVICE_CHECK-SERVICE_CHECK": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START"],
-    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START"],
+    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START", "WEBHCAT_SERVER-START"],
     "HCAT_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START"],
     "PIG_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
     "SQOOP_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/metainfo.xml
index b9baa9c..0c45712 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/metainfo.xml
@@ -37,31 +37,63 @@
           </dependencies>
         </component>
       </components>
-
-      <configuration-dependencies>
-        <config-type>hive-site</config-type>
-        <config-type>hive-log4j</config-type>
-        <config-type>hive-exec-log4j</config-type>
-        <config-type>tez-site</config-type>
-        <config-type>hive-env</config-type>
-      </configuration-dependencies>
-    </service>
-
-    <service>
-      <name>HCATALOG</name>
-      <comment>This is the comment for the HCATALOG service</comment>
-      <version>0.12.0.2.1</version>
       <osSpecifics>
         <osSpecific>
           <osFamily>any</osFamily>
           <packages>
             <package>
+              <name>hive</name>
+            </package>
+            <package>
               <name>hive-hcatalog</name>
             </package>
+            <package>
+              <name>hive-webhcat</name>
+            </package>
+            <package>
+              <name>webhcat-tar-hive</name>
+            </package>
+            <package>
+              <name>webhcat-tar-pig</name>
+            </package>
+            <package>
+              <name>mysql-connector-java</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>redhat5,redhat6,suse11</osFamily>
+          <packages>
+            <package>
+              <name>mysql</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>redhat5,redhat6,ubuntu12</osFamily>
+          <packages>
+            <package>
+              <name>mysql-server</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>suse11</osFamily>
+          <packages>
+            <package>
+              <name>mysql-client</name>
+            </package>
           </packages>
         </osSpecific>
       </osSpecifics>
-    </service>
 
+      <configuration-dependencies>
+        <config-type>hive-site</config-type>
+        <config-type>hive-log4j</config-type>
+        <config-type>hive-exec-log4j</config-type>
+        <config-type>tez-site</config-type>
+        <config-type>hive-env</config-type>
+      </configuration-dependencies>
+    </service>
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.1/services/WEBHCAT/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/WEBHCAT/configuration/webhcat-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.1/services/WEBHCAT/configuration/webhcat-site.xml
deleted file mode 100644
index 9b6c14e..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/WEBHCAT/configuration/webhcat-site.xml
+++ /dev/null
@@ -1,143 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- 
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-<!-- The default settings for Templeton. -->
-<!-- Edit templeton-site.xml to change settings for your local -->
-<!-- install. -->
-
-<configuration supports_final="true">
-
-  <property>
-    <name>templeton.port</name>
-    <value>50111</value>
-    <description>The HTTP port for the main server.</description>
-  </property>
-
-  <property>
-    <name>templeton.hadoop.conf.dir</name>
-    <value>/etc/hadoop/conf</value>
-    <description>The path to the Hadoop configuration.</description>
-  </property>
-
-  <property>
-    <name>templeton.jar</name>
-    <value>/usr/lib/hcatalog/share/webhcat/svr/webhcat.jar</value>
-    <description>The path to the Templeton jar file.</description>
-  </property>
-
-  <property>
-    <name>templeton.libjars</name>
-    <value>/usr/lib/zookeeper/zookeeper.jar</value>
-    <description>Jars to add to the classpath.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.hadoop</name>
-    <value>/usr/bin/hadoop</value>
-    <description>The path to the Hadoop executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.archive</name>
-    <value>hdfs:///apps/webhcat/pig.tar.gz</value>
-    <description>The path to the Pig archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.path</name>
-    <value>pig.tar.gz/pig/bin/pig</value>
-    <description>The path to the Pig executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hcat</name>
-    <value>/usr/bin/hcat</value>
-    <description>The path to the hcatalog executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.archive</name>
-    <value>hdfs:///apps/webhcat/hive.tar.gz</value>
-    <description>The path to the Hive archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.home</name>
-    <value>hive.tar.gz/hive</value>
-    <description>The path to the Hive home within the tar. Has no effect if templeton.hive.archive is not set.</description>
-  </property>
-
-  <property>
-    <name>templeton.hcat.home</name>
-    <value>hive.tar.gz/hive/hcatalog</value>
-    <description>The path to the HCat home within the tar. Has no effect if templeton.hive.archive is not set.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.path</name>
-    <value>hive.tar.gz/hive/bin/hive</value>
-    <description>The path to the Hive executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.properties</name>
-    <value>hive.metastore.local=false, hive.metastore.uris=thrift://localhost:9933, hive.metastore.sasl.enabled=false</value>
-    <description>Properties to set when running hive.</description>
-  </property>
-
-  <property>
-    <name>templeton.zookeeper.hosts</name>
-    <value>localhost:2181</value>
-    <description>ZooKeeper servers, as comma-separated host:port pairs</description>
-  </property>
-
-  <property>
-    <name>templeton.storage.class</name>
-    <value>org.apache.hive.hcatalog.templeton.tool.ZooKeeperStorage</value>
-    <description>The class to use as storage</description>
-  </property>
-
-  <property>
-    <name>templeton.override.enabled</name>
-    <value>false</value>
-    <description>
-      Enable the override path in templeton.override.jars
-    </description>
-  </property>
-
-  <property>
-    <name>templeton.streaming.jar</name>
-    <value>hdfs:///apps/webhcat/hadoop-streaming.jar</value>
-    <description>The HDFS path to the Hadoop streaming jar file.</description>
-  </property>
-
-  <property>
-    <name>templeton.exec.timeout</name>
-    <value>60000</value>
-    <description>Timeout for the Templeton API.</description>
-  </property>
-
-  <property>
-    <name>templeton.hadoop.queue.name</name>
-    <value>default</value>
-    <description>MapReduce queue name where WebHCat map-only jobs will be submitted to. Can be used to avoid a deadlock where all map slots in the cluster are taken over by Templeton launcher tasks.</description>
-  </property>
-
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.1/services/WEBHCAT/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/WEBHCAT/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1/services/WEBHCAT/metainfo.xml
deleted file mode 100644
index ea621c6..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/WEBHCAT/metainfo.xml
+++ /dev/null
@@ -1,47 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <comment>This is the comment for the WEBHCAT service</comment>
-      <version>0.13.0.2.1</version>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hive-webhcat</name>
-            </package>
-            <package>
-              <name>webhcat-tar-hive</name>
-            </package>
-            <package>
-              <name>webhcat-tar-pig</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      <configuration-dependencies>
-        <config-type>webhcat-site</config-type>
-        <config-type>webhcat-env</config-type>
-      </configuration-dependencies>
-    </service>
-  </services>
-</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.2.1/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.1/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2.1/services/HIVE/metainfo.xml
index 686d596..10b8e4e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2.1/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.1/services/HIVE/metainfo.xml
@@ -23,9 +23,5 @@
       <comment>Data warehouse system for ad-hoc queries &amp; analysis of large datasets and table &amp; storage management service</comment>
       <version>0.14.0.2.2</version>
     </service>
-    <service>
-      <name>HCATALOG</name>
-      <version>0.14.0.2.2</version>
-    </service>
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.2.1/services/WEBHCAT/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.1/services/WEBHCAT/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2.1/services/WEBHCAT/metainfo.xml
deleted file mode 100644
index a5222aa..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.2.1/services/WEBHCAT/metainfo.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <version>0.14.0.2.2</version>
-    </service>
-  </services>
-</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml
new file mode 100644
index 0000000..ce9f2c5
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml
@@ -0,0 +1,59 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- 
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+<!-- The default settings for Templeton. -->
+<!-- Edit templeton-site.xml to change settings for your local -->
+<!-- install. -->
+
+<configuration supports_final="true">
+
+  <property>
+    <name>templeton.hadoop.conf.dir</name>
+    <value>/usr/hdp/2.9.9.9-117/etc/hadoop/conf</value>
+    <description>The path to the Hadoop configuration.</description>
+  </property>
+
+  <property>
+    <name>templeton.jar</name>
+    <value>/usr/hdp/2.9.9.9-117/hcatalog/share/webhcat/svr/webhcat.jar</value>
+    <description>The path to the Templeton jar file.</description>
+  </property>
+
+  <property>
+    <name>templeton.libjars</name>
+    <value>/usr/hdp/2.9.9.9-117/zookeeper/zookeeper.jar</value>
+    <description>Jars to add to the classpath.</description>
+  </property>
+
+
+  <property>
+    <name>templeton.hadoop</name>
+    <value>/usr/hdp/2.9.9.9-117/hadoop/bin/hadoop</value>
+    <description>The path to the Hadoop executable.</description>
+  </property>
+
+
+  <property>
+    <name>templeton.hcat</name>
+    <value>/usr/hdp/2.9.9.9-117/hive/bin/hcat</value>
+    <description>The path to the hcatalog executable.</description>
+  </property>
+
+
+</configuration>

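Note that every value in the new file hangs off the same versioned stack root. A sketch of how such paths derive from one prefix (the version string below is the placeholder committed here, not a real release):

    STACK_ROOT = "/usr/hdp/2.9.9.9-117"  # placeholder version from the diff
    templeton_paths = {
        "templeton.jar": STACK_ROOT + "/hcatalog/share/webhcat/svr/webhcat.jar",
        "templeton.hadoop": STACK_ROOT + "/hadoop/bin/hadoop",
        "templeton.hcat": STACK_ROOT + "/hive/bin/hcat",
    }
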
http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
index 7548296..effe437 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
@@ -32,6 +32,18 @@
             <package>
               <name>mysql-connector-java</name>
             </package>
+            <package>
+              <name>hive_2_9_9_9_117-hcatalog</name>
+            </package>
+            <package>
+              <name>hive_2_9_9_9_117-webhcat</name>
+            </package>
+            <package>
+              <name>webhcat-tar-hive</name>
+            </package>
+            <package>
+              <name>webhcat-tar-pig</name>
+            </package>
           </packages>
         </osSpecific>
         <osSpecific>
@@ -60,22 +72,5 @@
         </osSpecific>
       </osSpecifics>
     </service>
-
-    <service>
-      <name>HCATALOG</name>
-      <comment>This is the comment for the HCATALOG service</comment>
-      <version>0.14.0.2.9.9.9</version>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hive_2_9_9_9_117-hcatalog</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-    </service>
-
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/configuration/webhcat-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/configuration/webhcat-site.xml
deleted file mode 100644
index ce9f2c5..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/configuration/webhcat-site.xml
+++ /dev/null
@@ -1,59 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- 
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-<!-- The default settings for Templeton. -->
-<!-- Edit templeton-site.xml to change settings for your local -->
-<!-- install. -->
-
-<configuration supports_final="true">
-
-  <property>
-    <name>templeton.hadoop.conf.dir</name>
-    <value>/usr/hdp/2.9.9.9-117/etc/hadoop/conf</value>
-    <description>The path to the Hadoop configuration.</description>
-  </property>
-
-  <property>
-    <name>templeton.jar</name>
-    <value>/usr/hdp/2.9.9.9-117/hcatalog/share/webhcat/svr/webhcat.jar</value>
-    <description>The path to the Templeton jar file.</description>
-  </property>
-
-  <property>
-    <name>templeton.libjars</name>
-    <value>/usr/hdp/2.9.9.9-117/zookeeper/zookeeper.jar</value>
-    <description>Jars to add to the classpath.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.hadoop</name>
-    <value>/usr/hdp/2.9.9.9-117/hadoop/bin/hadoop</value>
-    <description>The path to the Hadoop executable.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.hcat</name>
-    <value>/usr/hdp/2.9.9.9-117/hive/bin/hcat</value>
-    <description>The path to the hcatalog executable.</description>
-  </property>
-
-
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
index dfb1b81..346202e 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
@@ -18,35 +18,26 @@
 
 package org.apache.ambari.server.api.util;
 
+import com.google.inject.AbstractModule;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.metadata.ActionMetadata;
 import org.apache.ambari.server.state.*;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.easymock.EasyMock.*;
-import static org.junit.Assert.*;
-
 import org.apache.ambari.server.state.stack.ConfigurationXml;
 import org.junit.Test;
 import org.xml.sax.SAXException;
 
-import com.google.inject.AbstractModule;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-
 import javax.xml.bind.JAXBException;
 import javax.xml.namespace.QName;
 import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.xpath.XPathExpressionException;
+import java.io.File;
+import java.io.IOException;
+import java.util.*;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
 
 public class StackExtensionHelperTest {
 
@@ -305,7 +296,7 @@ public class StackExtensionHelperTest {
     helper.populateServicesForStack(stackInfo);
     helper.fillInfo();
     List<ServiceInfo> allServices = helper.getAllApplicableServices(stackInfo);
-    assertEquals(13, allServices.size());
+    assertEquals(12, allServices.size());
     for (ServiceInfo serviceInfo : allServices) {
       if (serviceInfo.getName().equals("NAGIOS")) {
         assertTrue(serviceInfo.isMonitoringService());

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 784b565..8a3e270 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -18,69 +18,19 @@
 
 package org.apache.ambari.server.controller;
 
-import static org.easymock.EasyMock.capture;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.createStrictMock;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.verify;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertSame;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.lang.reflect.Type;
-import java.net.ConnectException;
-import java.net.MalformedURLException;
-import java.net.UnknownHostException;
-import java.text.MessageFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-
-import javax.persistence.EntityManager;
-
+import com.google.gson.Gson;
+import com.google.gson.reflect.TypeToken;
+import com.google.inject.AbstractModule;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.persist.PersistService;
 import junit.framework.Assert;
-
-import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.ClusterNotFoundException;
-import org.apache.ambari.server.DuplicateResourceException;
-import org.apache.ambari.server.HostNotFoundException;
-import org.apache.ambari.server.ObjectNotFoundException;
-import org.apache.ambari.server.ParentObjectNotFoundException;
-import org.apache.ambari.server.Role;
-import org.apache.ambari.server.RoleCommand;
-import org.apache.ambari.server.ServiceNotFoundException;
-import org.apache.ambari.server.StackAccessException;
-import org.apache.ambari.server.actionmanager.ActionDBAccessor;
-import org.apache.ambari.server.actionmanager.ActionType;
-import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
-import org.apache.ambari.server.actionmanager.HostRoleCommand;
-import org.apache.ambari.server.actionmanager.HostRoleStatus;
-import org.apache.ambari.server.actionmanager.Request;
-import org.apache.ambari.server.actionmanager.Stage;
-import org.apache.ambari.server.actionmanager.TargetHostType;
+import org.apache.ambari.server.*;
+import org.apache.ambari.server.actionmanager.*;
 import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
-import org.apache.ambari.server.controller.internal.ComponentResourceProviderTest;
-import org.apache.ambari.server.controller.internal.HostResourceProviderTest;
-import org.apache.ambari.server.controller.internal.RequestOperationLevel;
-import org.apache.ambari.server.controller.internal.RequestResourceFilter;
-import org.apache.ambari.server.controller.internal.ServiceResourceProviderTest;
+import org.apache.ambari.server.controller.internal.*;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.customactions.ActionDefinition;
 import org.apache.ambari.server.metadata.ActionMetadata;
@@ -93,52 +43,31 @@ import org.apache.ambari.server.security.authorization.Users;
 import org.apache.ambari.server.serveraction.ServerAction;
 import org.apache.ambari.server.serveraction.ServerActionManager;
 import org.apache.ambari.server.serveraction.ServerActionManagerImpl;
-import org.apache.ambari.server.state.Cluster;
-import org.apache.ambari.server.state.Clusters;
-import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigFactory;
-import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.ConfigImpl;
-import org.apache.ambari.server.state.Host;
-import org.apache.ambari.server.state.HostComponentAdminState;
-import org.apache.ambari.server.state.HostState;
-import org.apache.ambari.server.state.MaintenanceState;
-import org.apache.ambari.server.state.RepositoryInfo;
-import org.apache.ambari.server.state.Service;
-import org.apache.ambari.server.state.ServiceComponent;
-import org.apache.ambari.server.state.ServiceComponentFactory;
-import org.apache.ambari.server.state.ServiceComponentHost;
-import org.apache.ambari.server.state.ServiceComponentHostFactory;
-import org.apache.ambari.server.state.ServiceFactory;
-import org.apache.ambari.server.state.StackId;
-import org.apache.ambari.server.state.StackInfo;
-import org.apache.ambari.server.state.State;
+import org.apache.ambari.server.state.*;
 import org.apache.ambari.server.state.configgroup.ConfigGroup;
 import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostInstallEvent;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpSucceededEvent;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStartEvent;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStartedEvent;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStopEvent;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStoppedEvent;
+import org.apache.ambari.server.state.svccomphost.*;
 import org.apache.ambari.server.utils.StageUtils;
 import org.apache.commons.collections.CollectionUtils;
 import org.easymock.Capture;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Rule;
-import org.junit.Test;
+import org.junit.*;
 import org.junit.rules.ExpectedException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.gson.Gson;
-import com.google.gson.reflect.TypeToken;
-import com.google.inject.AbstractModule;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.persist.PersistService;
+import javax.persistence.EntityManager;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.lang.reflect.Type;
+import java.net.ConnectException;
+import java.net.MalformedURLException;
+import java.net.UnknownHostException;
+import java.text.MessageFormat;
+import java.util.*;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
 
 public class AmbariManagementControllerTest {
 
@@ -7110,7 +7039,7 @@ public class AmbariManagementControllerTest {
   public void testGetStackServices() throws Exception {
     StackServiceRequest request = new StackServiceRequest(STACK_NAME, NEW_STACK_VERSION, null);
     Set<StackServiceResponse> responses = controller.getStackServices(Collections.singleton(request));
-    Assert.assertEquals(12, responses.size());
+    Assert.assertEquals(11, responses.size());
 
 
     StackServiceRequest requestWithParams = new StackServiceRequest(STACK_NAME, NEW_STACK_VERSION, SERVICE_NAME);

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java
index ffe2ea0..be5aea8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java
@@ -4,9 +4,6 @@ import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.StackServiceResponse;
 import org.apache.ambari.server.state.DependencyInfo;
 import org.easymock.EasyMockSupport;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.isA;
-
 import org.junit.Before;
 import org.junit.Test;
 
@@ -15,7 +12,9 @@ import java.util.Collections;
 import java.util.HashSet;
 import java.util.Set;
 
-import static org.junit.Assert.*;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.isA;
+import static org.junit.Assert.assertEquals;
 
 /**
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -53,7 +52,7 @@ public class BaseBlueprintProcessorTest {
     expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
 
     // test dependencies
-    final DependencyInfo hCatDependency = new TestDependencyInfo("WEBHCAT/HCAT");
+    final DependencyInfo hCatDependency = new TestDependencyInfo("HIVE/HCAT");
     final DependencyInfo yarnClientDependency = new TestDependencyInfo("YARN/YARN_CLIENT");
     final DependencyInfo tezClientDependency = new TestDependencyInfo("TEZ/TEZ_CLIENT");
     final DependencyInfo mapReduceTwoClientDependency = new TestDependencyInfo("YARN/MAPREDUCE2_CLIENT");
@@ -92,7 +91,7 @@ public class BaseBlueprintProcessorTest {
                  5, testStack.getDependencyConditionalServiceMap().size());
 
     assertEquals("Incorrect service dependency for HCAT",
-                 "HCATALOG", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
+                 "HIVE", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
     assertEquals("Incorrect service dependency for YARN_CLIENT",
                  "YARN", testStack.getDependencyConditionalServiceMap().get(yarnClientDependency));
     assertEquals("Incorrect service dependency for TEZ_CLIENT",
@@ -175,7 +174,7 @@ public class BaseBlueprintProcessorTest {
     expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
 
     // test dependencies
-    final DependencyInfo hCatDependency = new TestDependencyInfo("WEBHCAT/HCAT");
+    final DependencyInfo hCatDependency = new TestDependencyInfo("HIVE/HCAT");
     final DependencyInfo tezClientDependency = new TestDependencyInfo("TEZ/TEZ_CLIENT");
     final DependencyInfo mapReduceTwoClientDependency = new TestDependencyInfo("YARN/MAPREDUCE2_CLIENT");
     final DependencyInfo oozieClientDependency = new TestDependencyInfo("OOZIE/OOZIE_CLIENT");
@@ -212,7 +211,7 @@ public class BaseBlueprintProcessorTest {
       4, testStack.getDependencyConditionalServiceMap().size());
 
     assertEquals("Incorrect service dependency for HCAT",
-      "HCATALOG", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
+      "HIVE", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
     assertEquals("Incorrect service dependency for TEZ_CLIENT",
       "TEZ", testStack.getDependencyConditionalServiceMap().get(tezClientDependency));
     assertEquals("Incorrect service dependency for MAPREDUCE2_CLIENT",
@@ -234,7 +233,7 @@ public class BaseBlueprintProcessorTest {
     expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
 
     // test dependencies
-    final DependencyInfo hCatDependency = new TestDependencyInfo("WEBHCAT/HCAT");
+    final DependencyInfo hCatDependency = new TestDependencyInfo("HIVE/HCAT");
     final DependencyInfo yarnClientDependency = new TestDependencyInfo("YARN/YARN_CLIENT");
     final DependencyInfo mapReduceTwoClientDependency = new TestDependencyInfo("YARN/MAPREDUCE2_CLIENT");
     final DependencyInfo oozieClientDependency = new TestDependencyInfo("OOZIE/OOZIE_CLIENT");
@@ -271,7 +270,7 @@ public class BaseBlueprintProcessorTest {
       4, testStack.getDependencyConditionalServiceMap().size());
 
     assertEquals("Incorrect service dependency for HCAT",
-      "HCATALOG", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
+      "HIVE", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
     assertEquals("Incorrect service dependency for YARN_CLIENT",
       "YARN", testStack.getDependencyConditionalServiceMap().get(yarnClientDependency));
     assertEquals("Incorrect service dependency for MAPREDUCE2_CLIENT",
@@ -293,7 +292,7 @@ public class BaseBlueprintProcessorTest {
     expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
 
     // test dependencies
-    final DependencyInfo hCatDependency = new TestDependencyInfo("WEBHCAT/HCAT");
+    final DependencyInfo hCatDependency = new TestDependencyInfo("HIVE/HCAT");
     final DependencyInfo yarnClientDependency = new TestDependencyInfo("YARN/YARN_CLIENT");
     final DependencyInfo tezClientDependency = new TestDependencyInfo("TEZ/TEZ_CLIENT");
     final DependencyInfo oozieClientDependency = new TestDependencyInfo("OOZIE/OOZIE_CLIENT");
@@ -330,7 +329,7 @@ public class BaseBlueprintProcessorTest {
       4, testStack.getDependencyConditionalServiceMap().size());
 
     assertEquals("Incorrect service dependency for HCAT",
-      "HCATALOG", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
+      "HIVE", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
     assertEquals("Incorrect service dependency for YARN_CLIENT",
       "YARN", testStack.getDependencyConditionalServiceMap().get(yarnClientDependency));
     assertEquals("Incorrect service dependency for TEZ_CLIENT",
@@ -352,7 +351,7 @@ public class BaseBlueprintProcessorTest {
     expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn(Collections.<StackServiceResponse>emptySet());
 
     // test dependencies
-    final DependencyInfo hCatDependency = new TestDependencyInfo("WEBHCAT/HCAT");
+    final DependencyInfo hCatDependency = new TestDependencyInfo("HIVE/HCAT");
     final DependencyInfo yarnClientDependency = new TestDependencyInfo("YARN/YARN_CLIENT");
     final DependencyInfo tezClientDependency = new TestDependencyInfo("TEZ/TEZ_CLIENT");
     final DependencyInfo mapReduceTwoClientDependency = new TestDependencyInfo("YARN/MAPREDUCE2_CLIENT");
@@ -389,7 +388,7 @@ public class BaseBlueprintProcessorTest {
       4, testStack.getDependencyConditionalServiceMap().size());
 
     assertEquals("Incorrect service dependency for HCAT",
-      "HCATALOG", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
+      "HIVE", testStack.getDependencyConditionalServiceMap().get(hCatDependency));
     assertEquals("Incorrect service dependency for YARN_CLIENT",
       "YARN", testStack.getDependencyConditionalServiceMap().get(yarnClientDependency));
     assertEquals("Incorrect service dependency for TEZ_CLIENT",

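These test updates track the rename of the HCAT dependency from WEBHCAT/HCAT to HIVE/HCAT. Dependency names follow a SERVICE/COMPONENT convention, and the service half is what the conditional-service map keys on (a sketch of that split; the helper name is illustrative):

    def conditional_service(dependency_name):
        # "SERVICE/COMPONENT" -> the service a blueprint must include
        service, _component = dependency_name.split("/", 1)
        return service

    assert conditional_service("HIVE/HCAT") == "HIVE"
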
http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
index c7f5f1b..138b4b1 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
@@ -18,93 +18,51 @@
 
 package org.apache.ambari.server.upgrade;
 
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertNull;
-import static junit.framework.Assert.assertTrue;
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.capture;
-import static org.easymock.EasyMock.createMockBuilder;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.createStrictMock;
-import static org.easymock.EasyMock.eq;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.easymock.EasyMock.isA;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.reset;
-import static org.easymock.EasyMock.verify;
-
-import java.lang.reflect.Field;
-import java.lang.reflect.Method;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import javax.persistence.EntityManager;
-import javax.persistence.EntityTransaction;
-import javax.persistence.TypedQuery;
-import javax.persistence.criteria.CriteriaBuilder;
-import javax.persistence.criteria.CriteriaQuery;
-import javax.persistence.criteria.Order;
-import javax.persistence.criteria.Path;
-import javax.persistence.criteria.Predicate;
-import javax.persistence.criteria.Root;
-import javax.persistence.metamodel.SingularAttribute;
-
+import com.google.inject.*;
+import com.google.inject.persist.PersistService;
+import com.google.inject.persist.Transactional;
+import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor;
-import org.apache.ambari.server.orm.dao.ClusterDAO;
-import org.apache.ambari.server.orm.dao.KeyValueDAO;
-import org.apache.ambari.server.orm.dao.PermissionDAO;
-import org.apache.ambari.server.orm.dao.PrincipalDAO;
-import org.apache.ambari.server.orm.dao.PrincipalTypeDAO;
-import org.apache.ambari.server.orm.dao.PrivilegeDAO;
-import org.apache.ambari.server.orm.dao.ResourceDAO;
-import org.apache.ambari.server.orm.dao.ResourceTypeDAO;
-import org.apache.ambari.server.orm.dao.UserDAO;
-import org.apache.ambari.server.orm.dao.ViewDAO;
-import org.apache.ambari.server.orm.dao.ViewInstanceDAO;
-import org.apache.ambari.server.orm.dao.ConfigGroupConfigMappingDAO;
-import org.apache.ambari.server.orm.entities.ClusterEntity;
-import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
-import org.apache.ambari.server.orm.entities.KeyValueEntity;
-import org.apache.ambari.server.orm.entities.PrivilegeEntity;
-import org.apache.ambari.server.orm.entities.ResourceEntity;
-import org.apache.ambari.server.orm.entities.UserEntity;
-import org.apache.ambari.server.orm.entities.ViewEntity;
-import org.apache.ambari.server.orm.entities.ViewInstanceEntity;
-import org.apache.ambari.server.orm.entities.ConfigGroupConfigMappingEntity;
-import org.apache.ambari.server.orm.entities.ClusterConfigEntity;
-import org.apache.ambari.server.state.Cluster;
-import org.apache.ambari.server.state.Clusters;
-import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.orm.GuiceJpaInitializer;
+import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.*;
+import org.apache.ambari.server.orm.entities.*;
+import org.apache.ambari.server.state.*;
 import org.easymock.Capture;
+import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.google.inject.Binder;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.Module;
-import com.google.inject.Provider;
+import javax.persistence.EntityManager;
+import javax.persistence.EntityTransaction;
+import javax.persistence.TypedQuery;
+import javax.persistence.criteria.*;
+import javax.persistence.metamodel.SingularAttribute;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.*;
+
+import static junit.framework.Assert.*;
+import static org.easymock.EasyMock.*;
 
 /**
  * UpgradeCatalog170 unit tests.
  */
 public class UpgradeCatalog170Test {
 
+  private Injector injector;
+  private final String CLUSTER_NAME = "c1";
+  private final String SERVICE_NAME = "HDFS";
+  private final String HOST_NAME = "h1";
+  private final String DESIRED_STACK_VERSION = "{\"stackName\":\"HDP\",\"stackVersion\":\"2.0.6\"}";
+
   Provider<EntityManager> entityManagerProvider = createStrictMock(Provider.class);
   EntityManager entityManager = createStrictMock(EntityManager.class);
 
@@ -113,8 +71,145 @@ public class UpgradeCatalog170Test {
     reset(entityManagerProvider);
     expect(entityManagerProvider.get()).andReturn(entityManager).anyTimes();
     replay(entityManagerProvider);
+    injector = Guice.createInjector(new InMemoryDefaultTestModule());
+    injector.getInstance(GuiceJpaInitializer.class);
+  }
+
+  @After
+  public void tearDown() {
+    injector.getInstance(PersistService.class).stop();
+  }
+
+  private ClusterEntity createCluster() {
+    ResourceTypeDAO resourceTypeDAO = injector.getInstance(ResourceTypeDAO.class);
+
+    // create an admin resource to represent this cluster
+    ResourceTypeEntity resourceTypeEntity = resourceTypeDAO.findById(ResourceTypeEntity.CLUSTER_RESOURCE_TYPE);
+    if (resourceTypeEntity == null) {
+      resourceTypeEntity = new ResourceTypeEntity();
+      resourceTypeEntity.setId(ResourceTypeEntity.CLUSTER_RESOURCE_TYPE);
+      resourceTypeEntity.setName(ResourceTypeEntity.CLUSTER_RESOURCE_TYPE_NAME);
+      resourceTypeEntity = resourceTypeDAO.merge(resourceTypeEntity);
+    }
+    ResourceEntity resourceEntity = new ResourceEntity();
+    resourceEntity.setResourceType(resourceTypeEntity);
+
+    ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
+    ClusterEntity clusterEntity = new ClusterEntity();
+    clusterEntity.setClusterId(1L);
+    clusterEntity.setClusterName(CLUSTER_NAME);
+    clusterEntity.setDesiredStackVersion(DESIRED_STACK_VERSION);
+    clusterEntity.setResource(resourceEntity);
+    clusterDAO.create(clusterEntity);
+    return clusterEntity;
+  }
+
+  private ClusterServiceEntity createService(ClusterEntity clusterEntity, String serviceName) {
+    ClusterServiceDAO clusterServiceDAO = injector.getInstance(ClusterServiceDAO.class);
+    ClusterServiceEntity clusterServiceEntity = new ClusterServiceEntity();
+    clusterServiceEntity.setClusterId(1L);
+    clusterServiceEntity.setClusterEntity(clusterEntity);
+    clusterServiceEntity.setServiceName(serviceName);
+    clusterServiceDAO.create(clusterServiceEntity);
+    return clusterServiceEntity;
   }
 
+  private ClusterServiceEntity addService(ClusterEntity clusterEntity, String serviceName) {
+    ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
+
+    ClusterServiceEntity clusterServiceEntity = createService(clusterEntity, serviceName);
+
+    ServiceDesiredStateEntity serviceDesiredStateEntity = new ServiceDesiredStateEntity();
+    serviceDesiredStateEntity.setDesiredStackVersion(DESIRED_STACK_VERSION);
+    serviceDesiredStateEntity.setClusterId(1L);
+    serviceDesiredStateEntity.setServiceName(serviceName);
+    serviceDesiredStateEntity.setClusterServiceEntity(clusterServiceEntity);
+
+    clusterServiceEntity.setServiceDesiredStateEntity(serviceDesiredStateEntity);
+    clusterEntity.getClusterServiceEntities().add(clusterServiceEntity);
+
+    clusterDAO.merge(clusterEntity);
+
+    return clusterServiceEntity;
+  }
+
+
+  private HostEntity createHost(ClusterEntity clusterEntity) {
+    HostDAO hostDAO = injector.getInstance(HostDAO.class);
+    ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
+    HostEntity hostEntity = new HostEntity();
+    hostEntity.setHostName(HOST_NAME);
+    hostEntity.setClusterEntities(Collections.singletonList(clusterEntity));
+    hostDAO.create(hostEntity);
+    clusterEntity.getHostEntities().add(hostEntity);
+    clusterDAO.merge(clusterEntity);
+    return hostEntity;
+  }
+
+  @Transactional
+  private void addComponent(ClusterEntity clusterEntity, ClusterServiceEntity clusterServiceEntity, HostEntity hostEntity, String componentName) {
+    ServiceComponentDesiredStateEntity componentDesiredStateEntity = new ServiceComponentDesiredStateEntity();
+    componentDesiredStateEntity.setClusterServiceEntity(clusterServiceEntity);
+    componentDesiredStateEntity.setComponentName(componentName);
+    componentDesiredStateEntity.setServiceName(clusterServiceEntity.getServiceName());
+    componentDesiredStateEntity.setDesiredStackVersion(DESIRED_STACK_VERSION);
+    componentDesiredStateEntity.setClusterServiceEntity(clusterServiceEntity);
+    componentDesiredStateEntity.setClusterId(clusterServiceEntity.getClusterId());
+
+    HostComponentDesiredStateDAO hostComponentDesiredStateDAO = injector.getInstance(HostComponentDesiredStateDAO.class);
+    HostComponentDesiredStateEntity hostComponentDesiredStateEntity = new HostComponentDesiredStateEntity();
+    hostComponentDesiredStateEntity.setClusterId(clusterEntity.getClusterId());
+    hostComponentDesiredStateEntity.setHostName(hostEntity.getHostName());
+    hostComponentDesiredStateEntity.setComponentName(componentName);
+    hostComponentDesiredStateEntity.setServiceName(clusterServiceEntity.getServiceName());
+    hostComponentDesiredStateEntity.setAdminState(HostComponentAdminState.INSERVICE);
+    hostComponentDesiredStateEntity.setServiceComponentDesiredStateEntity(componentDesiredStateEntity);
+    hostComponentDesiredStateEntity.setHostEntity(hostEntity);
+    hostComponentDesiredStateDAO.create(hostComponentDesiredStateEntity);
+
+
+    HostComponentStateEntity hostComponentStateEntity = new HostComponentStateEntity();
+    hostComponentStateEntity.setHostEntity(hostEntity);
+    hostComponentStateEntity.setHostName(hostEntity.getHostName());
+    hostComponentStateEntity.setComponentName(componentName);
+    hostComponentStateEntity.setServiceName(clusterServiceEntity.getServiceName());
+    hostComponentStateEntity.setClusterId(clusterEntity.getClusterId());
+    hostComponentStateEntity.setCurrentStackVersion(clusterEntity.getDesiredStackVersion());
+    hostComponentStateEntity.setServiceComponentDesiredStateEntity(componentDesiredStateEntity);
+
+    componentDesiredStateEntity.setHostComponentStateEntities(Collections.singletonList(hostComponentStateEntity));
+    componentDesiredStateEntity.setHostComponentDesiredStateEntities(Collections.singletonList(hostComponentDesiredStateEntity));
+
+    hostEntity.getHostComponentStateEntities().add(hostComponentStateEntity);
+    hostEntity.getHostComponentDesiredStateEntities().add(hostComponentDesiredStateEntity);
+
+    clusterServiceEntity.getServiceComponentDesiredStateEntities().add(componentDesiredStateEntity);
+
+    ClusterServiceDAO clusterServiceDAO = injector.getInstance(ClusterServiceDAO.class);
+    ServiceComponentDesiredStateDAO serviceComponentDesiredStateDAO = injector.getInstance(ServiceComponentDesiredStateDAO.class);
+    HostDAO hostDAO = injector.getInstance(HostDAO.class);
+    serviceComponentDesiredStateDAO.merge(componentDesiredStateEntity);
+    hostDAO.merge(hostEntity);
+    clusterServiceDAO.merge(clusterServiceEntity);
+  }
+
+  @Test
+  public void testMoveHcatalogIntoHiveService() throws AmbariException {
+    final ClusterEntity clusterEntity = createCluster();
+    final ClusterServiceEntity clusterServiceEntityHDFS = addService(clusterEntity, "HDFS");
+    final ClusterServiceEntity clusterServiceEntityHIVE = addService(clusterEntity, "HIVE");
+    final ClusterServiceEntity clusterServiceEntityHCATALOG = addService(clusterEntity, "HCATALOG");
+    final ClusterServiceEntity clusterServiceEntityWEBHCAT = addService(clusterEntity, "WEBHCAT");
+    final HostEntity hostEntity = createHost(clusterEntity);
+    addComponent(clusterEntity, clusterServiceEntityHDFS, hostEntity, "NAMENODE");
+    addComponent(clusterEntity, clusterServiceEntityHIVE, hostEntity, "HIVE_SERVER");
+    addComponent(clusterEntity, clusterServiceEntityHCATALOG, hostEntity, "HCAT");
+    addComponent(clusterEntity, clusterServiceEntityWEBHCAT, hostEntity, "WEBHCAT_SERVER");
+    UpgradeCatalog170 upgradeCatalog170 = injector.getInstance(UpgradeCatalog170.class);
+    upgradeCatalog170.moveHcatalogIntoHiveService();
+  }
+
+
   @Test
   public void testExecuteDDLUpdates() throws Exception {
 

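The new testMoveHcatalogIntoHiveService seeds a cluster with HDFS, HIVE, HCATALOG and WEBHCAT plus one component each, then runs the catalog's moveHcatalogIntoHiveService(). The real implementation works against the JPA entities through the DAOs; the sketch below only illustrates the general shape of the consolidation, with plain collections standing in for the data model:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Rough sketch only, NOT UpgradeCatalog170: fold the retired HCATALOG and
    // WEBHCAT services into HIVE by re-parenting their components and dropping
    // the old service entries.
    class HcatMigrationSketch {
      public static void main(String[] args) {
        Map<String, List<String>> componentsByService = new HashMap<>();
        componentsByService.put("HDFS", new ArrayList<>(Arrays.asList("NAMENODE")));
        componentsByService.put("HIVE", new ArrayList<>(Arrays.asList("HIVE_SERVER")));
        componentsByService.put("HCATALOG", new ArrayList<>(Arrays.asList("HCAT")));
        componentsByService.put("WEBHCAT", new ArrayList<>(Arrays.asList("WEBHCAT_SERVER")));

        for (String retired : Arrays.asList("HCATALOG", "WEBHCAT")) {
          List<String> moved = componentsByService.remove(retired);   // drop old service
          if (moved != null) {
            componentsByService.get("HIVE").addAll(moved);            // re-parent components
          }
        }
        // HIVE ends up with [HIVE_SERVER, HCAT, WEBHCAT_SERVER]; HDFS is untouched.
        System.out.println(componentsByService);
      }
    }
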
http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py
index 9443e16..449c950 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py
@@ -59,6 +59,16 @@ class TestServiceCheck(RMFTestCase):
                         user = 'ambari-qa',
                         try_sleep = 5,
     )
+    self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
+                              content = StaticFile('templetonSmoke.sh'),
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa no_keytab false /usr/bin/kinit',
+                              logoutput = True,
+                              path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+                              tries = 3,
+                              try_sleep = 5,
+                              )
     self.assertNoMoreResources()
 
   @patch("sys.exit")
@@ -96,4 +106,14 @@ class TestServiceCheck(RMFTestCase):
                         user = 'ambari-qa',
                         try_sleep = 5,
     )
+    self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
+                              content = StaticFile('templetonSmoke.sh'),
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa /etc/security/keytabs/smokeuser.headless.keytab true /usr/bin/kinit',
+                              logoutput = True,
+                              path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+                              tries = 3,
+                              try_sleep = 5,
+                              )
     self.assertNoMoreResources()

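The added assertions cover both invocations of templetonSmoke.sh: non-secure ("no_keytab false") and kerberized (keytab path, "true"). A hedged sketch of how that command line is shaped; buildSmokeCommand() below is an assumed helper for illustration, not code from the stack scripts:

    // Illustrative only: the argument order (host, smoke user, keytab or
    // "no_keytab", security flag, kinit path) mirrors the Execute calls
    // asserted above.
    class TempletonSmokeCommand {
      static String buildSmokeCommand(String host, String smokeUser, String keytab,
                                      boolean securityEnabled, String kinitPath) {
        String keytabArg = securityEnabled ? keytab : "no_keytab";
        return String.join(" ", "/tmp/templetonSmoke.sh", host, smokeUser,
                           keytabArg, String.valueOf(securityEnabled), kinitPath);
      }

      public static void main(String[] args) {
        System.out.println(buildSmokeCommand("c6402.ambari.apache.org", "ambari-qa",
            "/etc/security/keytabs/smokeuser.headless.keytab", true, "/usr/bin/kinit"));
      }
    }
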

[21/27] git commit: AMBARI-7319. Rebalance of HDFS is not working for HDP 1.3. (jaimin)

Posted by jo...@apache.org.
AMBARI-7319. Rebalance of HDFS is not working for HDP 1.3. (jaimin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f84ee8dd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f84ee8dd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f84ee8dd

Branch: refs/heads/branch-alerts-dev
Commit: f84ee8dd7985224b6481a796c6102bb56e0251e8
Parents: 601014e
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Mon Sep 15 15:31:52 2014 -0700
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Mon Sep 15 15:31:52 2014 -0700

----------------------------------------------------------------------
 ambari-web/app/views/main/service/item.js | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f84ee8dd/ambari-web/app/views/main/service/item.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/item.js b/ambari-web/app/views/main/service/item.js
index 2dda3c0..7a388fd 100644
--- a/ambari-web/app/views/main/service/item.js
+++ b/ambari-web/app/views/main/service/item.js
@@ -196,7 +196,10 @@ App.MainServiceItemView = Em.View.extend({
       }
       options.push(actionMap.TOGGLE_PASSIVE);
       var serviceName = service.get('serviceName');
-      if (serviceName === 'HDFS') {
+      var nnComponent = App.StackServiceComponent.find().findProperty('componentName','NAMENODE');
+      if (serviceName === 'HDFS' && nnComponent) {
+        var namenodeCustomCommands = nnComponent.get('customCommands');
+        if (namenodeCustomCommands && namenodeCustomCommands.contains('REBALANCEHDFS'))
         options.push(actionMap.REBALANCE_HDFS);
       }
       self.addActionMap().filterProperty('service', serviceName).forEach(function(item) {

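The fix makes the Rebalance action conditional on stack metadata rather than the service name alone: it is offered only when the NAMENODE component actually advertises a REBALANCEHDFS custom command, which HDP 1.3 does not. The same guard restated outside Ember, as a sketch (StackComponent is a stand-in for App.StackServiceComponent, not a real class):

    import java.util.Set;

    // Illustrative restatement of the guard added above; not Ambari code.
    class RebalanceActionGuard {
      static final class StackComponent {
        final Set<String> customCommands;
        StackComponent(Set<String> customCommands) { this.customCommands = customCommands; }
      }

      static boolean showRebalanceAction(String serviceName, StackComponent namenode) {
        return "HDFS".equals(serviceName)
            && namenode != null
            && namenode.customCommands.contains("REBALANCEHDFS");
      }
    }
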

[08/27] git commit: AMBARI-7312. Ambari namenode UI link checks deprecated property for ssl enabled hdfs for HDP 2.1.x. (Denys Buzhor via akovalenko)

Posted by jo...@apache.org.
AMBARI-7312. Ambari namenode UI link checks deprecated property for ssl enabled hdfs for HDP 2.1.x. (Denys Buzhor via akovalenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8dd0e81d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8dd0e81d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8dd0e81d

Branch: refs/heads/branch-alerts-dev
Commit: 8dd0e81d8c8c3d4bfb14a0b6e6f301d74c0eac72
Parents: e3dba26
Author: Aleksandr Kovalenko <ak...@hortonworks.com>
Authored: Mon Sep 15 19:06:50 2014 +0300
Committer: Aleksandr Kovalenko <ak...@hortonworks.com>
Committed: Mon Sep 15 19:09:05 2014 +0300

----------------------------------------------------------------------
 .../services/HDFS/configuration/core-site.xml             |  9 +++++++++
 .../HDP/2.0.6/services/HDFS/configuration/core-site.xml   | 10 ++++++++++
 ambari-web/app/views/common/quick_view_link_view.js       |  6 +++++-
 3 files changed, 24 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8dd0e81d/ambari-server/src/main/resources/stacks/HDP/2.0.6.GlusterFS/services/HDFS/configuration/core-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6.GlusterFS/services/HDFS/configuration/core-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6.GlusterFS/services/HDFS/configuration/core-site.xml
index e335fe2..e79b146 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6.GlusterFS/services/HDFS/configuration/core-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6.GlusterFS/services/HDFS/configuration/core-site.xml
@@ -164,4 +164,13 @@ DEFAULT
     </description>
   </property>
 
+  <property>
+    <name>dfs.http.policy</name>
+    <value>HTTP_ONLY</value>
+    <description>
+      Decides whether HTTPS (SSL) is supported on HDFS. This configures the HTTP endpoint for HDFS daemons.
+      The following values are supported: HTTP_ONLY (service is provided only on HTTP), HTTPS_ONLY
+      (service is provided only on HTTPS), and HTTP_AND_HTTPS (service is provided on both HTTP and HTTPS).
+    </description>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8dd0e81d/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/configuration/core-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/configuration/core-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/configuration/core-site.xml
index 52764ac..31254b6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/configuration/core-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/configuration/core-site.xml
@@ -177,4 +177,14 @@ DEFAULT
     </description>
   </property>
 
+  <property>
+    <name>dfs.http.policy</name>
+    <value>HTTP_ONLY</value>
+    <description>
+      Decides whether HTTPS (SSL) is supported on HDFS. This configures the HTTP endpoint for HDFS daemons.
+      The following values are supported: HTTP_ONLY (service is provided only on HTTP), HTTPS_ONLY
+      (service is provided only on HTTPS), and HTTP_AND_HTTPS (service is provided on both HTTP and HTTPS).
+    </description>
+  </property>
+
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8dd0e81d/ambari-web/app/views/common/quick_view_link_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/quick_view_link_view.js b/ambari-web/app/views/common/quick_view_link_view.js
index a8b8a40..0304559 100644
--- a/ambari-web/app/views/common/quick_view_link_view.js
+++ b/ambari-web/app/views/common/quick_view_link_view.js
@@ -309,7 +309,11 @@ App.QuickViewLinks = Em.View.extend({
     var hadoopSslEnabled = false;
     if (configProperties && configProperties.length > 0) {
       var site = configProperties.findProperty('type', 'core-site');
-      hadoopSslEnabled = (site && site.properties && site.properties['hadoop.ssl.enabled'] == true);
+      if (parseInt(App.get('currentStackVersionNumber')[0]) > 1) {
+        hadoopSslEnabled = (Em.get(site, 'properties') && site.properties['dfs.http.policy'] === 'HTTPS_ONLY');
+      } else {
+        hadoopSslEnabled = (Em.get(site, 'properties') &&  site.properties['hadoop.ssl.enabled'] == true);
+      }
     }
     switch (service_id) {
       case "GANGLIA":

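The branch keys the SSL check on the stack's major version: HDP 2.x consults the new dfs.http.policy property, while 1.x still honors the deprecated hadoop.ssl.enabled flag. A minimal sketch of the same decision, assuming the core-site properties arrive as a plain string map:

    import java.util.Map;

    // Illustrative only: mirrors the version-gated check added above.
    class HdfsSslCheck {
      static boolean isSslEnabled(int stackMajorVersion, Map<String, String> coreSite) {
        if (coreSite == null) {
          return false;
        }
        if (stackMajorVersion > 1) {
          return "HTTPS_ONLY".equals(coreSite.get("dfs.http.policy"));
        }
        return Boolean.parseBoolean(coreSite.get("hadoop.ssl.enabled"));
      }
    }
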

[12/27] git commit: AMBARI-7315.Wizards: Wrong behavior on fast clicking Next button. (akovalenko)

Posted by jo...@apache.org.
AMBARI-7315.Wizards: Wrong behavior on fast clicking Next button. (akovalenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/66048a05
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/66048a05
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/66048a05

Branch: refs/heads/branch-alerts-dev
Commit: 66048a0539b2239bf2aafdaf897f5afb45f71e5d
Parents: 002a190
Author: Aleksandr Kovalenko <ak...@hortonworks.com>
Authored: Mon Sep 15 20:30:35 2014 +0300
Committer: Aleksandr Kovalenko <ak...@hortonworks.com>
Committed: Mon Sep 15 20:31:53 2014 +0300

----------------------------------------------------------------------
 .../app/controllers/wizard/step5_controller.js  | 38 ++++++++++++--------
 1 file changed, 24 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/66048a05/ambari-web/app/controllers/wizard/step5_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step5_controller.js b/ambari-web/app/controllers/wizard/step5_controller.js
index 258e856..68342c7 100644
--- a/ambari-web/app/controllers/wizard/step5_controller.js
+++ b/ambari-web/app/controllers/wizard/step5_controller.js
@@ -92,6 +92,12 @@ App.WizardStep5Controller = Em.Controller.extend(App.BlueprintMixin, {
   submitDisabled: false,
 
   /**
+   * Whether a Submit click is currently being processed
+   * @type {bool}
+   */
+  submitButtonClicked: false,
+
+  /**
    * Trigger for executing host names check for components
   * Should be "triggered" when host changed for some component and when new multiple component is added/removed
    * @type {bool}
@@ -1075,30 +1081,34 @@ App.WizardStep5Controller = Em.Controller.extend(App.BlueprintMixin, {
 
   /**
    * Submit button click handler
-   * @metohd submit
+   * @method submit
    */
   submit: function () {
     var self = this;
+    if (!this.get('submitButtonClicked')) {
+      this.set('submitButtonClicked', true);
 
-    var goNextStepIfValid = function() {
-      if (!self.get('submitDisabled')) {
-        App.router.send('next');
-      }
-    };
+      var goNextStepIfValid = function () {
+        if (!self.get('submitDisabled')) {
+          App.router.send('next');
+        }
+        self.set('submitButtonClicked', false);
+      };
 
-    if (App.get('supports.serverRecommendValidate')) {
-      self.recommendAndValidate(function() {
-        self.showValidationIssuesAcceptBox(goNextStepIfValid);
-      });
-    } else {
-      self.updateIsSubmitDisabled();
-      goNextStepIfValid();
+      if (App.get('supports.serverRecommendValidate')) {
+        self.recommendAndValidate(function () {
+          self.showValidationIssuesAcceptBox(goNextStepIfValid);
+        });
+      } else {
+        self.updateIsSubmitDisabled();
+        goNextStepIfValid();
+      }
     }
   },
 
   /**
    * In case of any validation issues shows accept dialog box for user which allow cancel and fix issues or continue anyway
-   * @metohd submit
+   * @method showValidationIssuesAcceptBox
    */
   showValidationIssuesAcceptBox: function(callback) {
     var self = this;

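The guard flag prevents a fast double-click from sending "next" twice while validation is still in flight: the flag is set on entry and cleared once the step transition (or the validation popup) resolves. The same pattern in plain Java, sketched with an AtomicBoolean (overkill for Ember's single-threaded runtime, but it makes the re-entrancy guard explicit):

    import java.util.concurrent.atomic.AtomicBoolean;

    // Illustrative only: the guard-flag pattern from the diff above.
    class SubmitGuard {
      private final AtomicBoolean submitInProgress = new AtomicBoolean(false);

      void onSubmitClicked(Runnable goNextStepIfValid) {
        // Ignore clicks that arrive while a submit is already being processed.
        if (!submitInProgress.compareAndSet(false, true)) {
          return;
        }
        try {
          goNextStepIfValid.run();      // validation + navigation happen here
        } finally {
          submitInProgress.set(false);  // re-arm once processing completes
        }
      }
    }
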

[06/27] git commit: AMBARI-7266. Properly replace host names in Falcon properties during BP deployment.

Posted by jo...@apache.org.
AMBARI-7266. Properly replace host names in Falcon properties during BP deployment.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1719a5ce
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1719a5ce
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1719a5ce

Branch: refs/heads/branch-alerts-dev
Commit: 1719a5ce20ebc4f0b109e9c9e2ff1ba23c23e58a
Parents: db9b776
Author: Robert Nettleton <rn...@hortonworks.com>
Authored: Mon Sep 15 10:05:27 2014 -0400
Committer: John Speidel <js...@hortonworks.com>
Committed: Mon Sep 15 10:05:27 2014 -0400

----------------------------------------------------------------------
 .../BlueprintConfigurationProcessor.java        | 30 +++++++++++++++++++-
 .../BlueprintConfigurationProcessorTest.java    | 21 ++++++++++++++
 2 files changed, 50 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1719a5ce/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
index c246f83..c31a869 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
@@ -274,6 +274,17 @@ public class BlueprintConfigurationProcessor {
     return hosts;
   }
 
+
+  /**
+   * Provides package-level access to the map of single host topology updaters.
+   * This is useful for facilitating unit-testing of this class.
+   *
+   * @return the map of single host topology updaters
+   */
+  static Map<String, Map<String, PropertyUpdater>> getSingleHostTopologyUpdaters() {
+    return singleHostTopologyUpdaters;
+  }
+
   /**
    * Provides functionality to update a property value.
    */
@@ -296,7 +307,7 @@ public class BlueprintConfigurationProcessor {
    * Topology based updater which replaces the original host name of a property with the host name
    * which runs the associated (master) component in the new cluster.
    */
-  private static class SingleHostTopologyUpdater implements PropertyUpdater {
+  static class SingleHostTopologyUpdater implements PropertyUpdater {
     /**
      * Component name
      */
@@ -341,6 +352,16 @@ public class BlueprintConfigurationProcessor {
         }
       }
     }
+
+    /**
+     * Provides access to the name of the component associated
+     *   with this updater instance.
+     *
+     * @return component name for this updater
+     */
+    public String getComponentName() {
+      return this.component;
+    }
   }
 
   /**
@@ -603,6 +624,8 @@ public class BlueprintConfigurationProcessor {
     Map<String, PropertyUpdater> hiveSiteMap = new HashMap<String, PropertyUpdater>();
     Map<String, PropertyUpdater> oozieSiteMap = new HashMap<String, PropertyUpdater>();
     Map<String, PropertyUpdater> stormSiteMap = new HashMap<String, PropertyUpdater>();
+    Map<String, PropertyUpdater> falconStartupPropertiesMap = new HashMap<String, PropertyUpdater>();
+
 
     Map<String, PropertyUpdater> mapredEnvMap = new HashMap<String, PropertyUpdater>();
     Map<String, PropertyUpdater> hadoopEnvMap = new HashMap<String, PropertyUpdater>();
@@ -623,6 +646,7 @@ public class BlueprintConfigurationProcessor {
     singleHostTopologyUpdaters.put("hive-site", hiveSiteMap);
     singleHostTopologyUpdaters.put("oozie-site", oozieSiteMap);
     singleHostTopologyUpdaters.put("storm-site", stormSiteMap);
+    singleHostTopologyUpdaters.put("falcon-startup.properties", falconStartupPropertiesMap);
 
     mPropertyUpdaters.put("hadoop-env", hadoopEnvMap);
     mPropertyUpdaters.put("hbase-env", hbaseEnvMap);
@@ -686,6 +710,10 @@ public class BlueprintConfigurationProcessor {
     multiStormSiteMap.put("storm.zookeeper.servers",
         new YamlMultiValuePropertyDecorator(new MultipleHostTopologyUpdater("ZOOKEEPER_SERVER")));
 
+    // FALCON
+    falconStartupPropertiesMap.put("*.broker.url", new SingleHostTopologyUpdater("FALCON_SERVER"));
+
+
     // Required due to AMBARI-4933.  These no longer seem to be required as the default values in the stack
     // are now correct but are left here in case an existing blueprint still contains an old value.
     hadoopEnvMap.put("namenode_heapsize", new MPropertyUpdater());

http://git-wip-us.apache.org/repos/asf/ambari/blob/1719a5ce/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java
index a4b8ba9..3213b20 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java
@@ -896,6 +896,27 @@ public class BlueprintConfigurationProcessorTest {
     assertEquals("jdbc:mysql://myHost.com/hive?createDatabaseIfNotExist=true", updatedVal);
   }
 
+  @Test
+  public void testFalconConfigPropertyUpdaterAdded() throws Exception {
+    Map<String, Map<String, BlueprintConfigurationProcessor.PropertyUpdater>> singleHostUpdaters =
+      BlueprintConfigurationProcessor.getSingleHostTopologyUpdaters();
+
+    assertTrue("Falcon startup.properties map was not added to the list of updater maps",
+               singleHostUpdaters.containsKey("falcon-startup.properties"));
+
+    Map<String, BlueprintConfigurationProcessor.PropertyUpdater> fieldsToUpdaters =
+      singleHostUpdaters.get("falcon-startup.properties");
+
+    assertTrue("Expected Falcon config property was not present in updater map",
+               fieldsToUpdaters.containsKey("*.broker.url"));
+
+    assertTrue("PropertyUpdater was not of the expected type for Falcon config property",
+               fieldsToUpdaters.get("*.broker.url") instanceof BlueprintConfigurationProcessor.SingleHostTopologyUpdater);
+
+    assertEquals("PropertyUpdater was not associated with the expected component name",
+                 "FALCON_SERVER", ((BlueprintConfigurationProcessor.SingleHostTopologyUpdater)fieldsToUpdaters.get("*.broker.url")).getComponentName());
+  }
+
   private class TestHostGroup implements HostGroup {
 
     private String name;

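With the new updater registered, a falcon-startup.properties value such as *.broker.url gets its host token rewritten to whichever host runs FALCON_SERVER. Conceptually it reduces to the sketch below; this is hedged illustration only — the real SingleHostTopologyUpdater resolves the host from the blueprint topology rather than taking it as a parameter, and "localhost" as the stock default is an assumption about the stack's template:

    // Illustrative only: what the registered updater amounts to for the
    // "*.broker.url" property.
    class FalconBrokerUrlSketch {
      static String updateBrokerUrl(String originalValue, String falconServerHost) {
        return originalValue.replace("localhost", falconServerHost);
      }

      public static void main(String[] args) {
        // tcp://localhost:61616 -> tcp://host1.example.com:61616
        System.out.println(updateBrokerUrl("tcp://localhost:61616", "host1.example.com"));
      }
    }
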

[13/27] git commit: AMBARI-7313. Services not starting up after enabling security.(vbrodetskyi)

Posted by jo...@apache.org.
AMBARI-7313. Services not starting up after enabling security.(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bee1d09b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bee1d09b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bee1d09b

Branch: refs/heads/branch-alerts-dev
Commit: bee1d09b0e82f8dea085b227528995dcf879dd23
Parents: 66048a0
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Mon Sep 15 20:40:52 2014 +0300
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Mon Sep 15 20:40:52 2014 +0300

----------------------------------------------------------------------
 .../1.3.2/hooks/before-START/scripts/shared_initialization.py  | 3 ++-
 .../2.0.6/hooks/before-START/scripts/shared_initialization.py  | 3 ++-
 .../stacks/1.3.2/hooks/before-START/test_before_start.py       | 6 ++++--
 .../stacks/2.0.6/hooks/before-START/test_before_start.py       | 6 ++++--
 4 files changed, 12 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/bee1d09b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py
index 669414c..60956b0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py
@@ -39,7 +39,8 @@ def setup_hadoop():
     Directory(params.hdfs_log_dir_prefix,
               recursive=True,
               owner='root',
-              group='root'
+              group=params.user_group,
+              mode=0775
     )
     Directory(params.hadoop_pid_dir_prefix,
               recursive=True,

http://git-wip-us.apache.org/repos/asf/ambari/blob/bee1d09b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
index 0d00aca..3c77761 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
@@ -38,7 +38,8 @@ def setup_hadoop():
     Directory(params.hdfs_log_dir_prefix,
               recursive=True,
               owner='root',
-              group='root'
+              group=params.user_group,
+              mode=0775
     )
     Directory(params.hadoop_pid_dir_prefix,
               recursive=True,

http://git-wip-us.apache.org/repos/asf/ambari/blob/bee1d09b/ambari-server/src/test/python/stacks/1.3.2/hooks/before-START/test_before_start.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/hooks/before-START/test_before_start.py b/ambari-server/src/test/python/stacks/1.3.2/hooks/before-START/test_before_start.py
index 112b3e8..d569a95 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/hooks/before-START/test_before_start.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/hooks/before-START/test_before_start.py
@@ -41,7 +41,8 @@ class TestHookBeforeStart(RMFTestCase):
                               )
     self.assertResourceCalled('Directory', '/var/log/hadoop',
                               owner = 'root',
-                              group = 'root',
+                              group = 'hadoop',
+                              mode = 0775,
                               recursive = True,
                               )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
@@ -99,7 +100,8 @@ class TestHookBeforeStart(RMFTestCase):
                               )
     self.assertResourceCalled('Directory', '/var/log/hadoop',
                               owner = 'root',
-                              group = 'root',
+                              group = 'hadoop',
+                              mode = 0775,
                               recursive = True,
                               )
     self.assertResourceCalled('Directory', '/var/run/hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/bee1d09b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
index 1eafe66..54b956e 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
@@ -40,7 +40,8 @@ class TestHookBeforeStart(RMFTestCase):
                               )
     self.assertResourceCalled('Directory', '/var/log/hadoop',
                               owner = 'root',
-                              group = 'root',
+                              group = 'hadoop',
+                              mode = 0775,
                               recursive = True,
                               )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
@@ -100,7 +101,8 @@ class TestHookBeforeStart(RMFTestCase):
                               )
     self.assertResourceCalled('Directory', '/var/log/hadoop',
                               owner = 'root',
-                              group = 'root',
+                              group = 'hadoop',
+                              mode = 0775,
                               recursive = True,
                               )
     self.assertResourceCalled('Directory', '/var/run/hadoop',

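Plausibly the failure mode here: with group root and no group write bit, the per-service users that take over after kerberization cannot create their log directories under /var/log/hadoop, so the change hands the directory to the hadoop group with mode 0775. The same intent in Java NIO terms, as a hedged sketch (the authoritative change is the Python resource definitions above):

    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.nio.file.attribute.PosixFilePermissions;

    // Illustrative only: rwxrwxr-x lets members of the hadoop group (the
    // service users) write under a root-owned log directory.
    class LogDirPermissions {
      public static void main(String[] args) throws Exception {
        Path logDir = Paths.get("/var/log/hadoop");
        Files.createDirectories(logDir);
        Files.setPosixFilePermissions(logDir,
            PosixFilePermissions.fromString("rwxrwxr-x"));
        // Group ownership (root:hadoop) would additionally be set via the
        // platform's PosixFileAttributeView.
      }
    }
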

[05/27] git commit: AMBARI-7305 Nimbus can not be started on CentOS5.9 (dsen)

Posted by jo...@apache.org.
AMBARI-7305 Nimbus can not be started on CentOS5.9 (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/db9b7763
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/db9b7763
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/db9b7763

Branch: refs/heads/branch-alerts-dev
Commit: db9b7763f5f69b4a4d03352b668b980d2576f8b3
Parents: 258f454
Author: Dmytro Sen <ds...@hortonworks.com>
Authored: Mon Sep 15 15:28:41 2014 +0300
Committer: Dmytro Sen <ds...@hortonworks.com>
Committed: Mon Sep 15 15:28:41 2014 +0300

----------------------------------------------------------------------
 .../stacks/HDP/2.1/services/STORM/configuration/storm-env.xml      | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/db9b7763/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/configuration/storm-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/configuration/storm-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/configuration/storm-env.xml
index d3804ee..c663116 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/configuration/storm-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/configuration/storm-env.xml
@@ -48,7 +48,7 @@
 # Set Storm specific environment variables here.
 
 # The java implementation to use.
-export JAVA_HOME={{java_home}}
+export JAVA_HOME={{java64_home}}
 
 # export STORM_CONF_DIR=""
     </value>


[25/27] git commit: AMBARI-7321. Slider view: Iron out story on how app-packages are deployed and instances created - fixes (srimanth)

Posted by jo...@apache.org.
AMBARI-7321. Slider view: Iron out story on how app-packages are deployed and instances created - fixes (srimanth)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/127978c2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/127978c2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/127978c2

Branch: refs/heads/branch-alerts-dev
Commit: 127978c2463d8cd8ad55d3ccee276cb350b02b9a
Parents: 63f5ed2
Author: Srimanth Gunturi <sg...@hortonworks.com>
Authored: Tue Sep 16 00:46:38 2014 -0700
Committer: Srimanth Gunturi <sg...@hortonworks.com>
Committed: Tue Sep 16 00:46:38 2014 -0700

----------------------------------------------------------------------
 .../apache/ambari/view/slider/SliderAppsViewControllerImpl.java    | 2 +-
 .../views/slider/src/main/resources/ui/app/styles/application.less | 1 -
 2 files changed, 1 insertion(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/127978c2/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
index cc35dec..04533ee 100644
--- a/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
+++ b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
@@ -681,7 +681,7 @@ public class SliderAppsViewControllerImpl implements SliderAppsViewController {
       
       final ActionInstallPackageArgs installArgs = new ActionInstallPackageArgs();
       installArgs.name = appName;
-      installArgs.packageURI = getAppsFolderPath() + configs.get("application.def").getAsString();
+      installArgs.packageURI = getAppsFolderPath() + "/" + configs.get("application.def").getAsString();
       installArgs.replacePkg = true;
 
       return invokeSliderClientRunnable(new SliderClientContextRunnable<String>() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/127978c2/contrib/views/slider/src/main/resources/ui/app/styles/application.less
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/styles/application.less b/contrib/views/slider/src/main/resources/ui/app/styles/application.less
index c193d3e..b934a20 100644
--- a/contrib/views/slider/src/main/resources/ui/app/styles/application.less
+++ b/contrib/views/slider/src/main/resources/ui/app/styles/application.less
@@ -38,7 +38,6 @@ a {
 
 #slider-apps-table  {
   .create-app {
-    margin-top:27px;
     margin-bottom: 20px;
   }
 #slider-table {

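The one-character Java fix addresses a classic join bug: the apps folder path evidently carries no trailing slash, so bare concatenation glued the folder and the package file name into one token. A small defensive variant, as a sketch (method and parameter names are assumptions for illustration):

    // Illustrative only: join a folder path and a file name without doubling
    // or dropping the separator, however the folder path is spelled.
    class PathJoin {
      static String packageUri(String appsFolderPath, String applicationDef) {
        return appsFolderPath.endsWith("/")
            ? appsFolderPath + applicationDef
            : appsFolderPath + "/" + applicationDef;
      }
    }
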

[03/27] git commit: AMBARI-7303. Manage Ambari and Dashboard should NOT open in named tabs. (onechiporenko)

Posted by jo...@apache.org.
AMBARI-7303. Manage Ambari and Dashboard should NOT open in named tabs. (onechiporenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ae0b1f36
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ae0b1f36
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ae0b1f36

Branch: refs/heads/branch-alerts-dev
Commit: ae0b1f36f32233810bda17bd16fccfe047ee0066
Parents: a96b3f8
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Mon Sep 15 13:56:31 2014 +0300
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Mon Sep 15 13:56:31 2014 +0300

----------------------------------------------------------------------
 .../src/main/resources/ui/admin-web/app/views/leftNavbar.html      | 2 +-
 ambari-web/app/templates/application.hbs                           | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ae0b1f36/ambari-admin/src/main/resources/ui/admin-web/app/views/leftNavbar.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/leftNavbar.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/leftNavbar.html
index b1ab544..4da46d7 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/leftNavbar.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/leftNavbar.html
@@ -57,7 +57,7 @@
           <li ng-class="{active: isActive('clusters.manageAccess')}">
             <a href="#/clusters/{{cluster.Clusters.cluster_name}}/manageAccess" class="permissions">Permissions</a>
           </li>
-          <li><a href="/#/main/dashboard" class="gotodashboard" target="{{cluster.Clusters.cluster_name}}">Go to Dashboard</a></li>
+          <li><a href="/#/main/dashboard" class="gotodashboard">Go to Dashboard</a></li>
         </ul>
       </div>
         

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae0b1f36/ambari-web/app/templates/application.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/application.hbs b/ambari-web/app/templates/application.hbs
index 002c590..6401500 100644
--- a/ambari-web/app/templates/application.hbs
+++ b/ambari-web/app/templates/application.hbs
@@ -52,7 +52,7 @@
               <ul class="dropdown-menu">
                   <li><a href="" {{action showAboutPopup target="controller"}}>{{t app.aboutAmbari}}</a></li>
                   {{#if App.isAdmin}}{{#unless App.isOperator}}
-                      <li><a href="/views/ADMIN_VIEW/1.0.0/INSTANCE/#/" target="adminconsole">{{t app.manageAmbari}}</a></li>
+                      <li><a href="/views/ADMIN_VIEW/1.0.0/INSTANCE/#/">{{t app.manageAmbari}}</a></li>
                   {{/unless}}{{/if}}
                 {{#if isClusterDataLoaded}}
                   {{#if App.isAdmin}}


[14/27] git commit: AMBARI-7314 - Views : change view status names

Posted by jo...@apache.org.
AMBARI-7314 - Views : change view status names


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/610bb1e7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/610bb1e7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/610bb1e7

Branch: refs/heads/branch-alerts-dev
Commit: 610bb1e7644112f159b15f9fc6f4e51a3350b406
Parents: bee1d09
Author: tbeerbower <tb...@hortonworks.com>
Authored: Mon Sep 15 14:31:16 2014 -0400
Committer: tbeerbower <tb...@hortonworks.com>
Committed: Mon Sep 15 15:10:31 2014 -0400

----------------------------------------------------------------------
 .../AmbariPrivilegeResourceProvider.java        |  2 +-
 .../internal/ViewInstanceResourceProvider.java  | 10 ++++-----
 .../ViewPermissionResourceProvider.java         |  4 ++--
 .../internal/ViewPrivilegeResourceProvider.java |  8 +++----
 .../ambari/server/orm/entities/ViewEntity.java  |  8 +++----
 .../apache/ambari/server/view/ViewRegistry.java |  4 ++--
 .../AmbariPrivilegeResourceProviderTest.java    |  2 +-
 .../ViewInstanceResourceProviderTest.java       | 10 ++++-----
 .../ViewPermissionResourceProviderTest.java     |  4 ++--
 .../ViewPrivilegeResourceProviderTest.java      |  2 +-
 .../server/orm/entities/ViewEntityTest.java     | 22 ++++++++++----------
 .../ambari/server/view/ViewRegistryTest.java    |  5 ++---
 .../org/apache/ambari/view/ViewDefinition.java  |  8 +++----
 13 files changed, 44 insertions(+), 45 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/610bb1e7/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariPrivilegeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariPrivilegeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariPrivilegeResourceProvider.java
index 85e5906..f4839e9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariPrivilegeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AmbariPrivilegeResourceProvider.java
@@ -125,7 +125,7 @@ public class AmbariPrivilegeResourceProvider extends PrivilegeResourceProvider<O
     //add view entities
     ViewRegistry viewRegistry = ViewRegistry.getInstance();
     for (ViewEntity viewEntity : viewRegistry.getDefinitions()) {
-      if (viewEntity.isLoaded()) {
+      if (viewEntity.isDeployed()) {
         for (ViewInstanceEntity viewInstanceEntity : viewEntity.getInstances()) {
           resourceEntities.put(viewInstanceEntity.getResource().getId(), viewInstanceEntity);
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/610bb1e7/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProvider.java
index 09b63a4..9a48b41 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProvider.java
@@ -146,7 +146,7 @@ public class ViewInstanceResourceProvider extends AbstractResourceProvider {
 
       for (ViewEntity viewDefinition : viewRegistry.getDefinitions()){
         // do not report instances for views that are not loaded.
-        if (viewDefinition.isLoaded()){
+        if (viewDefinition.isDeployed()){
           if (viewName == null || viewName.equals(viewDefinition.getCommonName())) {
             for (ViewInstanceEntity viewInstanceDefinition : viewRegistry.getInstanceDefinitions(viewDefinition)) {
               if (instanceName == null || instanceName.equals(viewInstanceDefinition.getName())) {
@@ -348,8 +348,8 @@ public class ViewInstanceResourceProvider extends AbstractResourceProvider {
             throw new IllegalStateException("The view " + viewName + " is not registered.");
           }
 
-          // the view must be in the LOADED state to create an instance
-          if (!view.isLoaded()) {
+          // the view must be in the DEPLOYED state to create an instance
+          if (!view.isDeployed()) {
             throw new IllegalStateException("The view " + viewName + " is not loaded.");
           }
 
@@ -393,8 +393,8 @@ public class ViewInstanceResourceProvider extends AbstractResourceProvider {
         Set<ViewInstanceEntity> viewInstanceEntities = new HashSet<ViewInstanceEntity>();
 
         for (ViewEntity viewEntity : viewRegistry.getDefinitions()){
-          // the view must be in the LOADED state to delete an instance
-          if (viewEntity.isLoaded()) {
+          // the view must be in the DEPLOYED state to delete an instance
+          if (viewEntity.isDeployed()) {
             for (ViewInstanceEntity viewInstanceEntity : viewRegistry.getInstanceDefinitions(viewEntity)){
               Resource resource = toResource(viewInstanceEntity, requestedIds);
               if (predicate == null || predicate.evaluate(resource)) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/610bb1e7/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewPermissionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewPermissionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewPermissionResourceProvider.java
index 5f025d9..979e214 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewPermissionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewPermissionResourceProvider.java
@@ -127,7 +127,7 @@ public class ViewPermissionResourceProvider extends AbstractResourceProvider {
         ViewEntity viewEntity = viewRegistry.getDefinition(viewName.toString(), viewVersion.toString());
 
         // do not report permissions for views that are not loaded.
-        if (viewEntity.isLoaded()) {
+        if (viewEntity.isDeployed()) {
           resources.add(toResource(viewUsePermission, viewEntity.getResourceType(), viewEntity, requestedIds));
         }
       }
@@ -138,7 +138,7 @@ public class ViewPermissionResourceProvider extends AbstractResourceProvider {
 
       ViewEntity viewEntity = viewRegistry.getDefinition(resourceType);
 
-      if (viewEntity != null && viewEntity.isLoaded()) {
+      if (viewEntity != null && viewEntity.isDeployed()) {
         resources.add(toResource(permissionEntity, resourceType, viewEntity, requestedIds));
       }
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/610bb1e7/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewPrivilegeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewPrivilegeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewPrivilegeResourceProvider.java
index f2f488a..2c016e4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewPrivilegeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewPrivilegeResourceProvider.java
@@ -118,7 +118,7 @@ public class ViewPrivilegeResourceProvider extends PrivilegeResourceProvider<Vie
 
       ViewEntity view = viewInstanceEntity.getViewEntity();
 
-      return view.isLoaded() ?
+      return view.isDeployed() ?
           Collections.singletonMap(viewInstanceEntity.getResource().getId(), viewInstanceEntity) :
           Collections.<Long, ViewInstanceEntity>emptyMap();
     }
@@ -141,7 +141,7 @@ public class ViewPrivilegeResourceProvider extends PrivilegeResourceProvider<Vie
     Map<Long, ViewInstanceEntity> resourceEntities = new HashMap<Long, ViewInstanceEntity>();
 
     for (ViewEntity viewEntity : viewEntities) {
-      if (viewEntity.isLoaded()) {
+      if (viewEntity.isDeployed()) {
         for (ViewInstanceEntity viewInstanceEntity : viewEntity.getInstances()) {
           resourceEntities.put(viewInstanceEntity.getResource().getId(), viewInstanceEntity);
         }
@@ -164,7 +164,7 @@ public class ViewPrivilegeResourceProvider extends PrivilegeResourceProvider<Vie
 
       ViewEntity view = viewInstanceEntity.getViewEntity();
 
-      return view.isLoaded() ? viewInstanceEntity.getResource().getId() : null;
+      return view.isDeployed() ? viewInstanceEntity.getResource().getId() : null;
     }
     return null;
   }
@@ -189,7 +189,7 @@ public class ViewPrivilegeResourceProvider extends PrivilegeResourceProvider<Vie
       ViewInstanceEntity viewInstanceEntity = resourceEntities.get(privilegeEntity.getResource().getId());
       ViewEntity         viewEntity         = viewInstanceEntity.getViewEntity();
 
-      if (!viewEntity.isLoaded()) {
+      if (!viewEntity.isDeployed()) {
         return null;
       }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/610bb1e7/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewEntity.java
index b6ec922..d42e1a0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewEntity.java
@@ -777,12 +777,12 @@ public class ViewEntity implements ViewDefinition {
   }
 
   /**
-   * Determine whether or not the entity is loaded.
+   * Determine whether or not the entity is deployed.
    *
-   * @return true if the entity is loaded
+   * @return true if the entity is deployed
    */
-  public boolean isLoaded() {
-    return status.equals(ViewStatus.LOADED);
+  public boolean isDeployed() {
+    return status.equals(ViewStatus.DEPLOYED);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/610bb1e7/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
index 1c6c792..17d77a6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
@@ -1188,7 +1188,7 @@ public class ViewRegistry {
                                                   File extractedArchiveDirFile,
                                                   ViewConfig viewConfig) {
 
-    setViewStatus(viewDefinition, ViewEntity.ViewStatus.LOADING, "Loading " + extractedArchiveDirFile + ".");
+    setViewStatus(viewDefinition, ViewEntity.ViewStatus.DEPLOYING, "Deploying " + extractedArchiveDirFile + ".");
 
     String extractedArchiveDirPath = extractedArchiveDirFile.getAbsolutePath();
 
@@ -1217,7 +1217,7 @@ public class ViewRegistry {
         addInstanceDefinition(viewDefinition, instanceEntity);
         handlerList.addViewInstance(instanceEntity);
       }
-      setViewStatus(viewDefinition, ViewEntity.ViewStatus.LOADED, "Loaded " + extractedArchiveDirPath + ".");
+      setViewStatus(viewDefinition, ViewEntity.ViewStatus.DEPLOYED, "Deployed " + extractedArchiveDirPath + ".");
 
     } catch (Exception e) {
       String msg = "Caught exception loading view " + viewDefinition.getViewName();
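
In condensed form, the ViewRegistry change above drives a view through DEPLOYING and then either DEPLOYED or, if deployment throws, ERROR. Below is a minimal, self-contained Java sketch of that flow under the statuses shown in this commit; the class and helper names (ViewDeployFlowSketch, setStatus, deploy) are illustrative stand-ins for ViewRegistry internals, not the actual API:

    public class ViewDeployFlowSketch {

      // Mirrors the ViewDefinition.ViewStatus values used in this commit.
      enum ViewStatus { PENDING, DEPLOYING, DEPLOYED, ERROR }

      private ViewStatus status = ViewStatus.PENDING;
      private String statusDetail = "";

      // Illustrative stand-in for ViewRegistry.setViewStatus(...).
      private void setStatus(ViewStatus newStatus, String detail) {
        this.status = newStatus;
        this.statusDetail = detail;
      }

      // DEPLOYING on entry; DEPLOYED on success; ERROR if deployment throws.
      void deploy(String archivePath) {
        setStatus(ViewStatus.DEPLOYING, "Deploying " + archivePath + ".");
        try {
          // ... extract the archive, read the view configuration, register instances ...
          setStatus(ViewStatus.DEPLOYED, "Deployed " + archivePath + ".");
        } catch (Exception e) {
          setStatus(ViewStatus.ERROR, "Caught exception deploying view: " + e.getMessage());
        }
      }
    }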

http://git-wip-us.apache.org/repos/asf/ambari/blob/610bb1e7/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AmbariPrivilegeResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AmbariPrivilegeResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AmbariPrivilegeResourceProviderTest.java
index fa342fe..1c58fbe 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AmbariPrivilegeResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AmbariPrivilegeResourceProviderTest.java
@@ -203,7 +203,7 @@ public class AmbariPrivilegeResourceProviderTest {
     expect(viewInstanceEntity.getViewEntity()).andReturn(viewEntity).anyTimes();
     expect(viewEntity.getCommonName()).andReturn("view").anyTimes();
     expect(viewEntity.getVersion()).andReturn("1.0.1").anyTimes();
-    expect(viewEntity.isLoaded()).andReturn(true).anyTimes();
+    expect(viewEntity.isDeployed()).andReturn(true).anyTimes();
     expect(viewInstanceEntity.getName()).andReturn("inst1").anyTimes();
     expect(viewInstanceEntity.getResource()).andReturn(viewResourceEntity).anyTimes();
     expect(viewUserEntity.getPrincipal()).andReturn(viewPrincipalEntity).anyTimes();

http://git-wip-us.apache.org/repos/asf/ambari/blob/610bb1e7/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProviderTest.java
index 4a429b6..8f34916 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProviderTest.java
@@ -115,7 +115,7 @@ public class ViewInstanceResourceProviderTest {
     viewInstanceEntity.setName("I1");
 
     ViewEntity viewEntity = new ViewEntity();
-    viewEntity.setStatus(ViewDefinition.ViewStatus.LOADED);
+    viewEntity.setStatus(ViewDefinition.ViewStatus.DEPLOYED);
     viewEntity.setName("V1{1.0.0}");
 
     viewInstanceEntity.setViewEntity(viewEntity);
@@ -155,7 +155,7 @@ public class ViewInstanceResourceProviderTest {
     viewInstanceEntity.setName("I1");
 
     ViewEntity viewEntity = new ViewEntity();
-    viewEntity.setStatus(ViewDefinition.ViewStatus.LOADED);
+    viewEntity.setStatus(ViewDefinition.ViewStatus.DEPLOYED);
     viewEntity.setName("V1{1.0.0}");
 
     viewInstanceEntity.setViewEntity(viewEntity);
@@ -192,7 +192,7 @@ public class ViewInstanceResourceProviderTest {
 
     ViewEntity viewEntity = new ViewEntity();
     viewEntity.setName("V1{1.0.0}");
-    viewEntity.setStatus(ViewDefinition.ViewStatus.LOADING);
+    viewEntity.setStatus(ViewDefinition.ViewStatus.DEPLOYING);
     ViewInstanceEntity viewInstanceEntity = new ViewInstanceEntity();
     viewInstanceEntity.setViewName("V1{1.0.0}");
     viewInstanceEntity.setName("I1");
@@ -230,7 +230,7 @@ public class ViewInstanceResourceProviderTest {
         predicateBuilder.property(ViewInstanceResourceProvider.VIEW_NAME_PROPERTY_ID).equals("V1").toPredicate();
     ViewEntity viewEntity = new ViewEntity();
     viewEntity.setName("V1{1.0.0}");
-    viewEntity.setStatus(ViewDefinition.ViewStatus.LOADING);
+    viewEntity.setStatus(ViewDefinition.ViewStatus.DEPLOYING);
     ViewInstanceEntity viewInstanceEntity = new ViewInstanceEntity();
     viewInstanceEntity.setViewName("V1{1.0.0}");
     viewInstanceEntity.setName("I1");
@@ -267,7 +267,7 @@ public class ViewInstanceResourceProviderTest {
 
     ViewEntity viewEntity = new ViewEntity();
     viewEntity.setName("V1{1.0.0}");
-    viewEntity.setStatus(ViewDefinition.ViewStatus.LOADING);
+    viewEntity.setStatus(ViewDefinition.ViewStatus.DEPLOYING);
     ViewInstanceEntity viewInstanceEntity = new ViewInstanceEntity();
     viewInstanceEntity.setViewName("V1{1.0.0}");
     viewInstanceEntity.setName("I1");

http://git-wip-us.apache.org/repos/asf/ambari/blob/610bb1e7/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewPermissionResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewPermissionResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewPermissionResourceProviderTest.java
index eebe0d3..f863ed3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewPermissionResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewPermissionResourceProviderTest.java
@@ -83,7 +83,7 @@ public class ViewPermissionResourceProviderTest {
     expect(permissionEntity.getResourceType()).andReturn(resourceTypeEntity);
     expect(resourceTypeEntity.getName()).andReturn("V1");
 
-    expect(viewEntity.isLoaded()).andReturn(true).anyTimes();
+    expect(viewEntity.isDeployed()).andReturn(true).anyTimes();
     expect(viewEntity.getCommonName()).andReturn("V1").anyTimes();
     expect(viewEntity.getVersion()).andReturn("1.0.0").anyTimes();
 
@@ -118,7 +118,7 @@ public class ViewPermissionResourceProviderTest {
 
     expect(permissionEntity.getResourceType()).andReturn(resourceTypeEntity);
 
-    expect(viewEntity.isLoaded()).andReturn(false).anyTimes();
+    expect(viewEntity.isDeployed()).andReturn(false).anyTimes();
 
     expect(viewRegistry.getDefinition(resourceTypeEntity)).andReturn(viewEntity);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/610bb1e7/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewPrivilegeResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewPrivilegeResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewPrivilegeResourceProviderTest.java
index 0e9d3d6..e1a4da3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewPrivilegeResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewPrivilegeResourceProviderTest.java
@@ -102,7 +102,7 @@ public class ViewPrivilegeResourceProviderTest {
 
     viewDefinition.addInstanceDefinition(viewInstanceDefinition);
     viewInstanceDefinition.setViewEntity(viewDefinition);
-    viewDefinition.setStatus(ViewDefinition.ViewStatus.LOADED);
+    viewDefinition.setStatus(ViewDefinition.ViewStatus.DEPLOYED);
 
     ViewRegistry registry = ViewRegistry.getInstance();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/610bb1e7/ambari-server/src/test/java/org/apache/ambari/server/orm/entities/ViewEntityTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/entities/ViewEntityTest.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/entities/ViewEntityTest.java
index be0d3a6..965cebb 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/entities/ViewEntityTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/entities/ViewEntityTest.java
@@ -249,11 +249,11 @@ public class ViewEntityTest {
     viewDefinition.setStatus(ViewDefinition.ViewStatus.PENDING);
     Assert.assertEquals(ViewDefinition.ViewStatus.PENDING, viewDefinition.getStatus());
 
-    viewDefinition.setStatus(ViewDefinition.ViewStatus.LOADING);
-    Assert.assertEquals(ViewDefinition.ViewStatus.LOADING, viewDefinition.getStatus());
+    viewDefinition.setStatus(ViewDefinition.ViewStatus.DEPLOYING);
+    Assert.assertEquals(ViewDefinition.ViewStatus.DEPLOYING, viewDefinition.getStatus());
 
-    viewDefinition.setStatus(ViewDefinition.ViewStatus.LOADED);
-    Assert.assertEquals(ViewDefinition.ViewStatus.LOADED, viewDefinition.getStatus());
+    viewDefinition.setStatus(ViewDefinition.ViewStatus.DEPLOYED);
+    Assert.assertEquals(ViewDefinition.ViewStatus.DEPLOYED, viewDefinition.getStatus());
 
     viewDefinition.setStatus(ViewDefinition.ViewStatus.ERROR);
     Assert.assertEquals(ViewDefinition.ViewStatus.ERROR, viewDefinition.getStatus());
@@ -268,20 +268,20 @@ public class ViewEntityTest {
   }
 
   @Test
-  public void testIsLoaded() throws Exception {
+  public void testIsDeployed() throws Exception {
     ViewEntity viewDefinition = getViewEntity();
 
     viewDefinition.setStatus(ViewDefinition.ViewStatus.PENDING);
-    Assert.assertFalse(viewDefinition.isLoaded());
+    Assert.assertFalse(viewDefinition.isDeployed());
 
-    viewDefinition.setStatus(ViewDefinition.ViewStatus.LOADING);
-    Assert.assertFalse(viewDefinition.isLoaded());
+    viewDefinition.setStatus(ViewDefinition.ViewStatus.DEPLOYING);
+    Assert.assertFalse(viewDefinition.isDeployed());
 
-    viewDefinition.setStatus(ViewDefinition.ViewStatus.LOADED);
-    Assert.assertTrue(viewDefinition.isLoaded());
+    viewDefinition.setStatus(ViewDefinition.ViewStatus.DEPLOYED);
+    Assert.assertTrue(viewDefinition.isDeployed());
 
     viewDefinition.setStatus(ViewDefinition.ViewStatus.ERROR);
-    Assert.assertFalse(viewDefinition.isLoaded());
+    Assert.assertFalse(viewDefinition.isDeployed());
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/ambari/blob/610bb1e7/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java b/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
index 0915325..8e36dba 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
@@ -84,7 +84,6 @@ import org.apache.ambari.view.events.Listener;
 import org.easymock.EasyMock;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.springframework.security.core.GrantedAuthority;
 
@@ -296,13 +295,13 @@ public class ViewRegistryTest {
 
     // Wait for the view load to complete.
     long timeout = System.currentTimeMillis() + 10000L;
-    while ((view == null || !view.getStatus().equals(ViewDefinition.ViewStatus.LOADED))&&
+    while ((view == null || !view.getStatus().equals(ViewDefinition.ViewStatus.DEPLOYED)) &&
         System.currentTimeMillis() < timeout) {
       view = registry.getDefinition("MY_VIEW", "1.0.0");
     }
 
     Assert.assertNotNull(view);
-    Assert.assertEquals(ViewDefinition.ViewStatus.LOADED, view.getStatus());
+    Assert.assertEquals(ViewDefinition.ViewStatus.DEPLOYED, view.getStatus());
 
     Assert.assertEquals(2, registry.getInstanceDefinitions(view).size());
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/610bb1e7/ambari-views/src/main/java/org/apache/ambari/view/ViewDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-views/src/main/java/org/apache/ambari/view/ViewDefinition.java b/ambari-views/src/main/java/org/apache/ambari/view/ViewDefinition.java
index 18b86ff..f378dde 100644
--- a/ambari-views/src/main/java/org/apache/ambari/view/ViewDefinition.java
+++ b/ambari-views/src/main/java/org/apache/ambari/view/ViewDefinition.java
@@ -79,9 +79,9 @@ public interface ViewDefinition {
    * View status
    */
   public enum ViewStatus {
-    PENDING, // view has been created but not loaded from the archive
-    LOADING, // view is in the process of being loaded from the archive
-    LOADED,  // view is completely loaded and ready to use
-    ERROR    // an error occurred loading the view
+    PENDING,   // view has been created but not yet deployed from the archive
+    DEPLOYING, // view is in the process of being deployed from the archive
+    DEPLOYED,  // view is completely deployed and ready to use
+    ERROR      // an error occurred deploying the view
   }
 }
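
Taken together with ViewEntity.isDeployed() above, only the DEPLOYED state marks a view as ready to use; PENDING, DEPLOYING and ERROR are all treated as not usable. A small self-contained Java sketch of that check, mirroring the enum in this diff (the class name and main() driver are illustrative, not part of Ambari):

    public class ViewStatusSketch {

      // Same four states as ViewDefinition.ViewStatus above.
      public enum ViewStatus { PENDING, DEPLOYING, DEPLOYED, ERROR }

      // Equivalent of ViewEntity.isDeployed(): true only for DEPLOYED.
      public static boolean isDeployed(ViewStatus status) {
        return status == ViewStatus.DEPLOYED;
      }

      public static void main(String[] args) {
        for (ViewStatus s : ViewStatus.values()) {
          // Prints "usable" only for DEPLOYED, matching ViewEntityTest.testIsDeployed().
          System.out.println(s + " -> " + (isDeployed(s) ? "usable" : "not usable"));
        }
      }
    }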


[17/27] AMBARI-7296. HCatalog and WebHCat services should not be managed as separate services (they should be part of the Hive service) (jaimin)

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_webhcat_server.py b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_webhcat_server.py
new file mode 100644
index 0000000..75496c2
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_webhcat_server.py
@@ -0,0 +1,258 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, patch
+from stacks.utils.RMFTestCase import *
+
+class TestWebHCatServer(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("1.3.2/services/HIVE/package/scripts/webhcat_server.py",
+                       classname = "WebHCatServer",
+                       command = "configure",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  def test_start_default(self):
+    self.executeScript("1.3.2/services/HIVE/package/scripts/webhcat_server.py",
+                       classname = "WebHCatServer",
+                       command = "start",
+                       config_file="default.json"
+    )
+
+    self.assert_configure_default()
+    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh start',
+                              not_if = 'ls /var/run/webhcat/webhcat.pid >/dev/null 2>&1 && ps `cat /var/run/webhcat/webhcat.pid` >/dev/null 2>&1',
+                              user = 'hcat'
+    )
+    self.assertNoMoreResources()
+
+  def test_stop_default(self):
+    self.executeScript("1.3.2/services/HIVE/package/scripts/webhcat_server.py",
+                       classname = "WebHCatServer",
+                       command = "stop",
+                       config_file="default.json"
+    )
+
+    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh stop',
+                              user = 'hcat',
+                              )
+    self.assertResourceCalled('Execute', 'rm -f /var/run/webhcat/webhcat.pid')
+    self.assertNoMoreResources()
+
+  def test_configure_secured(self):
+    self.executeScript("1.3.2/services/HIVE/package/scripts/webhcat_server.py",
+                       classname = "WebHCatServer",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+
+    self.assert_configure_secured()
+    self.assertNoMoreResources()
+
+  def test_start_secured(self):
+    self.executeScript("1.3.2/services/HIVE/package/scripts/webhcat_server.py",
+                       classname = "WebHCatServer",
+                       command = "start",
+                       config_file="secured.json"
+    )
+
+    self.assert_configure_secured()
+    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh start',
+                              not_if = 'ls /var/run/webhcat/webhcat.pid >/dev/null 2>&1 && ps `cat /var/run/webhcat/webhcat.pid` >/dev/null 2>&1',
+                              user = 'hcat'
+    )
+    self.assertNoMoreResources()
+
+  def test_stop_secured(self):
+    self.executeScript("1.3.2/services/HIVE/package/scripts/webhcat_server.py",
+                       classname = "WebHCatServer",
+                       command = "stop",
+                       config_file="secured.json"
+    )
+
+    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh stop',
+                              user = 'hcat',
+                              )
+    self.assertResourceCalled('Execute', 'rm -f /var/run/webhcat/webhcat.pid')
+    self.assertNoMoreResources()
+
+  def assert_configure_default(self):
+    self.assertResourceCalled('HdfsDirectory', '/user/hcat',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0755,
+                              owner = 'hcat',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0755,
+                              owner = 'hcat',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              action = ['create'],
+                              )
+    self.assertResourceCalled('Directory', '/var/run/webhcat',
+      owner = 'hcat',
+      group = 'hadoop',
+      recursive = True,
+      mode = 0755,
+    )
+    self.assertResourceCalled('Directory', '/var/log/webhcat',
+      owner = 'hcat',
+      group = 'hadoop',
+      recursive = True,
+      mode = 0755,
+    )
+    self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
+      owner = 'hcat',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
+      owner = 'hcat',
+      group = 'hadoop',
+      conf_dir = '/etc/hcatalog/conf',
+      configurations = self.getConfig()['configurations']['webhcat-site'],
+      configuration_attributes = self.getConfig()['configuration_attributes']['webhcat-site']
+    )
+    self.assertResourceCalled('File', '/etc/hcatalog/conf/webhcat-env.sh',
+      content = InlineTemplate(self.getConfig()['configurations']['webhcat-env']['content']),
+      owner = 'hcat',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/lib/hadoop/contrib/streaming/hadoop-streaming*.jar',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/pig.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/hive.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='',
+                              hdfs_user='hdfs'
+    )
+
+  def assert_configure_secured(self):
+    self.assertResourceCalled('HdfsDirectory', '/user/hcat',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0755,
+                              owner = 'hcat',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0755,
+                              owner = 'hcat',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              )
+    self.assertResourceCalled('Directory', '/var/run/webhcat',
+      owner = 'hcat',
+      group = 'hadoop',
+      recursive = True,
+      mode = 0755,
+    )
+    self.assertResourceCalled('Directory', '/var/log/webhcat',
+      owner = 'hcat',
+      group = 'hadoop',
+      recursive = True,
+      mode = 0755,
+    )
+    self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
+      owner = 'hcat',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
+      owner = 'hcat',
+      group = 'hadoop',
+      conf_dir = '/etc/hcatalog/conf',
+      configurations = self.getConfig()['configurations']['webhcat-site'],
+      configuration_attributes = self.getConfig()['configuration_attributes']['webhcat-site']
+    )
+    self.assertResourceCalled('File', '/etc/hcatalog/conf/webhcat-env.sh',
+      content = InlineTemplate(self.getConfig()['configurations']['webhcat-env']['content']),
+      owner = 'hcat',
+      group = 'hadoop',
+    )
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
+      path = ['/bin'],
+      user = 'hcat',
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/lib/hadoop/contrib/streaming/hadoop-streaming*.jar',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/pig.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/hive.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
+                              hdfs_user='hdfs'
+    )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/python/stacks/1.3.2/WEBHCAT/test_webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/WEBHCAT/test_webhcat_server.py b/ambari-server/src/test/python/stacks/1.3.2/WEBHCAT/test_webhcat_server.py
deleted file mode 100644
index c0d8ccc..0000000
--- a/ambari-server/src/test/python/stacks/1.3.2/WEBHCAT/test_webhcat_server.py
+++ /dev/null
@@ -1,258 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-from mock.mock import MagicMock, patch
-from stacks.utils.RMFTestCase import *
-
-class TestWebHCatServer(RMFTestCase):
-
-  def test_configure_default(self):
-    self.executeScript("1.3.2/services/WEBHCAT/package/scripts/webhcat_server.py",
-                       classname = "WebHCatServer",
-                       command = "configure",
-                       config_file="default.json"
-    )
-    self.assert_configure_default()
-    self.assertNoMoreResources()
-
-  def test_start_default(self):
-    self.executeScript("1.3.2/services/WEBHCAT/package/scripts/webhcat_server.py",
-                       classname = "WebHCatServer",
-                       command = "start",
-                       config_file="default.json"
-    )
-
-    self.assert_configure_default()
-    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh start',
-                              not_if = 'ls /var/run/webhcat/webhcat.pid >/dev/null 2>&1 && ps `cat /var/run/webhcat/webhcat.pid` >/dev/null 2>&1',
-                              user = 'hcat'
-    )
-    self.assertNoMoreResources()
-
-  def test_stop_default(self):
-    self.executeScript("1.3.2/services/WEBHCAT/package/scripts/webhcat_server.py",
-                       classname = "WebHCatServer",
-                       command = "stop",
-                       config_file="default.json"
-    )
-
-    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh stop',
-                              user = 'hcat',
-                              )
-    self.assertResourceCalled('Execute', 'rm -f /var/run/webhcat/webhcat.pid')
-    self.assertNoMoreResources()
-
-    def test_configure_secured(self):
-      self.executeScript("1.3.2/services/WEBHCAT/package/scripts/webhcat_server.py",
-                         classname = "WebHCatServer",
-                         command = "configure",
-                         config_file="secured.json"
-      )
-
-      self.assert_configure_secured()
-      self.assertNoMoreResources()
-
-  def test_start_secured(self):
-    self.executeScript("1.3.2/services/WEBHCAT/package/scripts/webhcat_server.py",
-                       classname = "WebHCatServer",
-                       command = "start",
-                       config_file="secured.json"
-    )
-
-    self.assert_configure_secured()
-    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh start',
-                              not_if = 'ls /var/run/webhcat/webhcat.pid >/dev/null 2>&1 && ps `cat /var/run/webhcat/webhcat.pid` >/dev/null 2>&1',
-                              user = 'hcat'
-    )
-    self.assertNoMoreResources()
-
-  def test_stop_secured(self):
-    self.executeScript("1.3.2/services/WEBHCAT/package/scripts/webhcat_server.py",
-                       classname = "WebHCatServer",
-                       command = "stop",
-                       config_file="secured.json"
-    )
-
-    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh stop',
-                              user = 'hcat',
-                              )
-    self.assertResourceCalled('Execute', 'rm -f /var/run/webhcat/webhcat.pid')
-    self.assertNoMoreResources()
-
-  def assert_configure_default(self):
-    self.assertResourceCalled('HdfsDirectory', '/user/hcat',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0755,
-                              owner = 'hcat',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0755,
-                              owner = 'hcat',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              action = ['create'],
-                              )
-    self.assertResourceCalled('Directory', '/var/run/webhcat',
-      owner = 'hcat',
-      group = 'hadoop',
-      recursive = True,
-      mode = 0755,
-    )
-    self.assertResourceCalled('Directory', '/var/log/webhcat',
-      owner = 'hcat',
-      group = 'hadoop',
-      recursive = True,
-      mode = 0755,
-    )
-    self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
-      owner = 'hcat',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
-      owner = 'hcat',
-      group = 'hadoop',
-      conf_dir = '/etc/hcatalog/conf',
-      configurations = self.getConfig()['configurations']['webhcat-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['webhcat-site']
-    )
-    self.assertResourceCalled('File', '/etc/hcatalog/conf/webhcat-env.sh',
-      content = InlineTemplate(self.getConfig()['configurations']['webhcat-env']['content']),
-      owner = 'hcat',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/lib/hadoop/contrib/streaming/hadoop-streaming*.jar',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/pig.tar.gz',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/hive.tar.gz',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='',
-                              hdfs_user='hdfs'
-    )
-
-  def assert_configure_secured(self):
-    self.assertResourceCalled('HdfsDirectory', '/user/hcat',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0755,
-                              owner = 'hcat',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0755,
-                              owner = 'hcat',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              action = ['create'],
-                              )
-    self.assertResourceCalled('Directory', '/var/run/webhcat',
-      owner = 'hcat',
-      group = 'hadoop',
-      recursive = True,
-      mode = 0755,
-    )
-    self.assertResourceCalled('Directory', '/var/log/webhcat',
-      owner = 'hcat',
-      group = 'hadoop',
-      recursive = True,
-      mode = 0755,
-    )
-    self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
-      owner = 'hcat',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
-      owner = 'hcat',
-      group = 'hadoop',
-      conf_dir = '/etc/hcatalog/conf',
-      configurations = self.getConfig()['configurations']['webhcat-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['webhcat-site']
-    )
-    self.assertResourceCalled('File', '/etc/hcatalog/conf/webhcat-env.sh',
-      content = InlineTemplate(self.getConfig()['configurations']['webhcat-env']['content']),
-      owner = 'hcat',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
-      path = ['/bin'],
-      user = 'hcat',
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/lib/hadoop/contrib/streaming/hadoop-streaming*.jar',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/pig.tar.gz',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/hive.tar.gz',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
-                              hdfs_user='hdfs'
-    )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/python/stacks/1.3.2/WEBHCAT/test_webhcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/WEBHCAT/test_webhcat_service_check.py b/ambari-server/src/test/python/stacks/1.3.2/WEBHCAT/test_webhcat_service_check.py
deleted file mode 100644
index c37319b..0000000
--- a/ambari-server/src/test/python/stacks/1.3.2/WEBHCAT/test_webhcat_service_check.py
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-from mock.mock import MagicMock, call, patch
-from stacks.utils.RMFTestCase import *
-
-class TestServiceCheck(RMFTestCase):
-
-  def test_service_check_default(self):
-
-    self.executeScript("1.3.2/services/WEBHCAT/package/scripts/service_check.py",
-                       classname="WebHCatServiceCheck",
-                       command="service_check",
-                       config_file="default.json"
-    )
-    self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
-                       content = StaticFile('templetonSmoke.sh'),
-                       mode = 0755,
-    )
-    self.assertResourceCalled('Execute', 'sh /tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa no_keytab false /usr/bin/kinit',
-                       logoutput = True,
-                       path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
-                       tries = 3,
-                       try_sleep = 5,
-    )
-    self.assertNoMoreResources()
-
-  def test_service_check_secured(self):
-
-    self.executeScript("1.3.2/services/WEBHCAT/package/scripts/service_check.py",
-                       classname="WebHCatServiceCheck",
-                       command="service_check",
-                       config_file="secured.json"
-    )
-    self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
-                       content = StaticFile('templetonSmoke.sh'),
-                       mode = 0755,
-    )
-    self.assertResourceCalled('Execute', 'sh /tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa /etc/security/keytabs/smokeuser.headless.keytab true /usr/bin/kinit',
-                       logoutput = True,
-                       path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
-                       tries = 3,
-                       try_sleep = 5,
-    )
-    self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
index 4ae9ad2..79ac487 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
@@ -63,6 +63,16 @@ class TestServiceCheck(RMFTestCase):
                         environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
                         try_sleep = 5,
     )
+    self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
+                              content = StaticFile('templetonSmoke.sh'),
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa no_keytab false /usr/bin/kinit',
+                              logoutput = True,
+                              path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+                              tries = 3,
+                              try_sleep = 5,
+                              )
     self.assertNoMoreResources()
 
   @patch("sys.exit")
@@ -103,4 +113,14 @@ class TestServiceCheck(RMFTestCase):
                         environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
                         try_sleep = 5,
     )
+    self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
+                              content = StaticFile('templetonSmoke.sh'),
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa /etc/security/keytabs/smokeuser.headless.keytab true /usr/bin/kinit',
+                              logoutput = True,
+                              path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+                              tries = 3,
+                              try_sleep = 5,
+                              )
     self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
new file mode 100644
index 0000000..71839cb
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
@@ -0,0 +1,270 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, patch
+from stacks.utils.RMFTestCase import *
+
+class TestWebHCatServer(RMFTestCase):
+
+  def test_configure_default(self):
+    self.executeScript("2.0.6/services/HIVE/package/scripts/webhcat_server.py",
+                       classname = "WebHCatServer",
+                       command = "configure",
+                       config_file="default.json"
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  def test_start_default(self):
+    self.executeScript("2.0.6/services/HIVE/package/scripts/webhcat_server.py",
+                       classname = "WebHCatServer",
+                       command = "start",
+                       config_file="default.json"
+    )
+
+    self.assert_configure_default()
+    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh start',
+                              not_if = 'ls /var/run/webhcat/webhcat.pid >/dev/null 2>&1 && ps `cat /var/run/webhcat/webhcat.pid` >/dev/null 2>&1',
+                              user = 'hcat'
+    )
+    self.assertNoMoreResources()
+
+  def test_stop_default(self):
+    self.executeScript("2.0.6/services/HIVE/package/scripts/webhcat_server.py",
+                       classname = "WebHCatServer",
+                       command = "stop",
+                       config_file="default.json"
+    )
+
+    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh stop',
+                              user = 'hcat',
+                              )
+    self.assertResourceCalled('Execute', 'rm -f /var/run/webhcat/webhcat.pid')
+    self.assertNoMoreResources()
+
+  def test_configure_secured(self):
+    self.executeScript("2.0.6/services/HIVE/package/scripts/webhcat_server.py",
+                       classname = "WebHCatServer",
+                       command = "configure",
+                       config_file="secured.json"
+    )
+
+    self.assert_configure_secured()
+    self.assertNoMoreResources()
+
+  def test_start_secured(self):
+    self.executeScript("2.0.6/services/HIVE/package/scripts/webhcat_server.py",
+                       classname = "WebHCatServer",
+                       command = "start",
+                       config_file="secured.json"
+    )
+
+    self.assert_configure_secured()
+    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh start',
+                              not_if = 'ls /var/run/webhcat/webhcat.pid >/dev/null 2>&1 && ps `cat /var/run/webhcat/webhcat.pid` >/dev/null 2>&1',
+                              user = 'hcat'
+    )
+    self.assertNoMoreResources()
+
+  def test_stop_secured(self):
+    self.executeScript("2.0.6/services/HIVE/package/scripts/webhcat_server.py",
+                       classname = "WebHCatServer",
+                       command = "stop",
+                       config_file="secured.json"
+    )
+
+    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh stop',
+                              user = 'hcat',
+                              )
+    self.assertResourceCalled('Execute', 'rm -f /var/run/webhcat/webhcat.pid')
+    self.assertNoMoreResources()
+
+  def assert_configure_default(self):
+    self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0755,
+                              owner = 'hcat',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/hcat',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0755,
+                              owner = 'hcat',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
+    self.assertResourceCalled('Directory', '/var/run/webhcat',
+                              owner = 'hcat',
+                              group = 'hadoop',
+                              recursive = True,
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/webhcat',
+                              owner = 'hcat',
+                              group = 'hadoop',
+                              recursive = True,
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
+                              owner = 'hcat',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
+                              owner = 'hcat',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hcatalog/conf',
+                              configurations = self.getConfig()['configurations']['webhcat-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['webhcat-site']
+    )
+    self.assertResourceCalled('File', '/etc/hcatalog/conf/webhcat-env.sh',
+                              content = InlineTemplate(self.getConfig()['configurations']['webhcat-env']['content']),
+                              owner = 'hcat',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('CopyFromLocal', '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/pig.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/hive.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hdfs_user='hdfs'
+    )
+
+  def assert_configure_secured(self):
+    self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0755,
+                              owner = 'hcat',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/hcat',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0755,
+                              owner = 'hcat',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
+    self.assertResourceCalled('Directory', '/var/run/webhcat',
+                              owner = 'hcat',
+                              group = 'hadoop',
+                              recursive = True,
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/webhcat',
+                              owner = 'hcat',
+                              group = 'hadoop',
+                              recursive = True,
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
+                              owner = 'hcat',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
+                              owner = 'hcat',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hcatalog/conf',
+                              configurations = self.getConfig()['configurations']['webhcat-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['webhcat-site']
+    )
+    self.assertResourceCalled('File', '/etc/hcatalog/conf/webhcat-env.sh',
+                              content = InlineTemplate(self.getConfig()['configurations']['webhcat-env']['content']),
+                              owner = 'hcat',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
+                              path = ['/bin'],
+                              user = 'hcat',
+                              )
+    self.assertResourceCalled('CopyFromLocal', '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/pig.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/hive.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hdfs_user='hdfs'
+    )
\ No newline at end of file
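
For context: the assertions above follow the RMFTestCase pattern used throughout these stack tests. A minimal sketch of that pattern follows; the script path and the asserted resource are illustrative only, and the snippet runs only inside Ambari's Python test harness:

from stacks.utils.RMFTestCase import *

class TestWebHCatSketch(RMFTestCase):
  def test_configure_default(self):
    # Drive the stack script's "configure" command against a canned JSON config.
    self.executeScript("2.0.6/services/HIVE/package/scripts/webhcat_server.py",
                       classname = "WebHCatServer",
                       command = "configure",
                       config_file="default.json"
    )
    # Each assertResourceCalled pops the next resource the script declared
    # and compares its type, name, and keyword arguments.
    self.assertResourceCalled('Directory', '/var/run/webhcat',
                              owner = 'hcat',
                              group = 'hadoop',
                              recursive = True,
                              mode = 0755,
                              )
    # Fails if the script declared any resources beyond those asserted.
    self.assertNoMoreResources()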

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/python/stacks/2.0.6/WEBHCAT/test_webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/WEBHCAT/test_webhcat_server.py b/ambari-server/src/test/python/stacks/2.0.6/WEBHCAT/test_webhcat_server.py
deleted file mode 100644
index bde2e86..0000000
--- a/ambari-server/src/test/python/stacks/2.0.6/WEBHCAT/test_webhcat_server.py
+++ /dev/null
@@ -1,273 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-from mock.mock import MagicMock, patch
-from stacks.utils.RMFTestCase import *
-
-from resource_management.libraries import functions
-import json
-
-class TestWebHCatServer(RMFTestCase):
-
-  def test_configure_default(self):
-    self.executeScript("2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py",
-                       classname = "WebHCatServer",
-                       command = "configure",
-                       config_file="default.json"
-    )
-    self.assert_configure_default()
-    self.assertNoMoreResources()
-
-  def test_start_default(self):
-    self.executeScript("2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py",
-                       classname = "WebHCatServer",
-                       command = "start",
-                       config_file="default.json"
-    )
-
-    self.assert_configure_default()
-    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh start',
-                              not_if = 'ls /var/run/webhcat/webhcat.pid >/dev/null 2>&1 && ps `cat /var/run/webhcat/webhcat.pid` >/dev/null 2>&1',
-                              user = 'hcat'
-    )
-    self.assertNoMoreResources()
-
-  def test_stop_default(self):
-    self.executeScript("2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py",
-                       classname = "WebHCatServer",
-                       command = "stop",
-                       config_file="default.json"
-    )
-
-    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh stop',
-                              user = 'hcat',
-                              )
-    self.assertResourceCalled('Execute', 'rm -f /var/run/webhcat/webhcat.pid')
-    self.assertNoMoreResources()
-
-  def test_configure_secured(self):
-    self.executeScript("2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py",
-                       classname = "WebHCatServer",
-                       command = "configure",
-                       config_file="secured.json"
-    )
-
-    self.assert_configure_secured()
-    self.assertNoMoreResources()
-
-  def test_start_secured(self):
-    self.executeScript("2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py",
-                       classname = "WebHCatServer",
-                       command = "start",
-                       config_file="secured.json"
-    )
-
-    self.assert_configure_secured()
-    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh start',
-                              not_if = 'ls /var/run/webhcat/webhcat.pid >/dev/null 2>&1 && ps `cat /var/run/webhcat/webhcat.pid` >/dev/null 2>&1',
-                              user = 'hcat'
-    )
-    self.assertNoMoreResources()
-
-  def test_stop_secured(self):
-    self.executeScript("2.0.6/services/WEBHCAT/package/scripts/webhcat_server.py",
-                       classname = "WebHCatServer",
-                       command = "stop",
-                       config_file="secured.json"
-    )
-
-    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh stop',
-                              user = 'hcat',
-                              )
-    self.assertResourceCalled('Execute', 'rm -f /var/run/webhcat/webhcat.pid')
-    self.assertNoMoreResources()
-
-  def assert_configure_default(self):
-    self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0755,
-                              owner = 'hcat',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/user/hcat',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0755,
-                              owner = 'hcat',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
-    self.assertResourceCalled('Directory', '/var/run/webhcat',
-      owner = 'hcat',
-      group = 'hadoop',
-      recursive = True,
-      mode = 0755,
-    )
-    self.assertResourceCalled('Directory', '/var/log/webhcat',
-      owner = 'hcat',
-      group = 'hadoop',
-      recursive = True,
-      mode = 0755,
-    )
-    self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
-      owner = 'hcat',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
-      owner = 'hcat',
-      group = 'hadoop',
-      conf_dir = '/etc/hcatalog/conf',
-      configurations = self.getConfig()['configurations']['webhcat-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['webhcat-site']
-    )
-    self.assertResourceCalled('File', '/etc/hcatalog/conf/webhcat-env.sh',
-      content = InlineTemplate(self.getConfig()['configurations']['webhcat-env']['content']),
-      owner = 'hcat',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/pig.tar.gz',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/hive.tar.gz',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hdfs_user='hdfs'
-    )
-
-  def assert_configure_secured(self):
-    self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0755,
-                              owner = 'hcat',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/user/hcat',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0755,
-                              owner = 'hcat',
-                              bin_dir = '/usr/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              bin_dir = '/usr/bin',
-                              action = ['create'],
-                              )
-    self.assertResourceCalled('Directory', '/var/run/webhcat',
-      owner = 'hcat',
-      group = 'hadoop',
-      recursive = True,
-      mode = 0755,
-    )
-    self.assertResourceCalled('Directory', '/var/log/webhcat',
-      owner = 'hcat',
-      group = 'hadoop',
-      recursive = True,
-      mode = 0755,
-    )
-    self.assertResourceCalled('Directory', '/etc/hcatalog/conf',
-      owner = 'hcat',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
-      owner = 'hcat',
-      group = 'hadoop',
-      conf_dir = '/etc/hcatalog/conf',
-      configurations = self.getConfig()['configurations']['webhcat-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['webhcat-site']
-    )
-    self.assertResourceCalled('File', '/etc/hcatalog/conf/webhcat-env.sh',
-      content = InlineTemplate(self.getConfig()['configurations']['webhcat-env']['content']),
-      owner = 'hcat',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
-      path = ['/bin'],
-      user = 'hcat',
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/pig.tar.gz',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hdfs_user='hdfs'
-    )
-    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/hive.tar.gz',
-                              owner='hcat',
-                              mode=0755,
-                              dest_dir='/apps/webhcat',
-                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
-                              hadoop_conf_dir='/etc/hadoop/conf',
-                              hdfs_user='hdfs'
-    )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/python/stacks/2.0.6/WEBHCAT/test_webhcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/WEBHCAT/test_webhcat_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/WEBHCAT/test_webhcat_service_check.py
deleted file mode 100644
index 314d3f6..0000000
--- a/ambari-server/src/test/python/stacks/2.0.6/WEBHCAT/test_webhcat_service_check.py
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-from mock.mock import MagicMock, call, patch
-from stacks.utils.RMFTestCase import *
-
-class TestServiceCheck(RMFTestCase):
-
-  def test_service_check_default(self):
-
-    self.executeScript("2.0.6/services/WEBHCAT/package/scripts/service_check.py",
-                       classname="WebHCatServiceCheck",
-                       command="service_check",
-                       config_file="default.json"
-    )
-    self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
-                       content = StaticFile('templetonSmoke.sh'),
-                       mode = 0755,
-    )
-    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa no_keytab false /usr/bin/kinit',
-                       logoutput = True,
-                       path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
-                       tries = 3,
-                       try_sleep = 5,
-    )
-    self.assertNoMoreResources()
-
-  def test_service_check_secured(self):
-
-    self.executeScript("2.0.6/services/WEBHCAT/package/scripts/service_check.py",
-                       classname="WebHCatServiceCheck",
-                       command="service_check",
-                       config_file="secured.json"
-    )
-    self.assertResourceCalled('File', '/tmp/templetonSmoke.sh',
-                       content = StaticFile('templetonSmoke.sh'),
-                       mode = 0755,
-    )
-    self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa /etc/security/keytabs/smokeuser.headless.keytab true /usr/bin/kinit',
-                       logoutput = True,
-                       path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
-                       tries = 3,
-                       try_sleep = 5,
-    )
-    self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HIVE/metainfo.xml b/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HIVE/metainfo.xml
index 5215d34..5145ef2 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HIVE/metainfo.xml
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HIVE/metainfo.xml
@@ -81,6 +81,49 @@
         </component>
 
         <component>
+          <name>WEBHCAT_SERVER</name>
+          <displayName>WebHCat Server</displayName>
+          <category>MASTER</category>
+          <cardinality>1</cardinality>
+          <dependencies>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>MAPREDUCE/MAPREDUCE_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
+              <scope>cluster</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+                <co-locate>WEBHCAT/WEBHCAT_SERVER</co-locate>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>ZOOKEEPER/ZOOKEEPER_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <commandScript>
+            <script>scripts/webhcat_server.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+        </component>
+
+        <component>
           <name>MYSQL_SERVER</name>
           <category>MASTER</category>
           <cardinality>1</cardinality>
@@ -111,6 +154,37 @@
             </configFile>
           </configFiles>
         </component>
+        <component>
+          <name>HCAT</name>
+          <displayName>HCat</displayName>
+          <category>CLIENT</category>
+          <commandScript>
+            <script>scripts/hcat_client.py</script>
+            <scriptType>PYTHON</scriptType>
+          </commandScript>
+          <configFiles>
+            <configFile>
+              <type>xml</type>
+              <fileName>hive-site.xml</fileName>
+              <dictionaryName>hive-site</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>hive-env.sh</fileName>
+              <dictionaryName>hive-env</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>hive-log4j.properties</fileName>
+              <dictionaryName>hive-log4j</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>hive-exec-log4j.properties</fileName>
+              <dictionaryName>hive-exec-log4j</dictionaryName>
+            </configFile>
+          </configFiles>
+        </component>
       </components>
 
       <osSpecifics>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/WEBHCAT/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/WEBHCAT/configuration/webhcat-site.xml b/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/WEBHCAT/configuration/webhcat-site.xml
deleted file mode 100644
index 5a78c0c..0000000
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/WEBHCAT/configuration/webhcat-site.xml
+++ /dev/null
@@ -1,126 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- 
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-<!-- The default settings for Templeton. -->
-<!-- Edit templeton-site.xml to change settings for your local -->
-<!-- install. -->
-
-<configuration supports_final="true">
-
-  <property>
-    <name>templeton.port</name>
-      <value>50111</value>
-    <description>The HTTP port for the main server.</description>
-  </property>
-
-  <property>
-    <name>templeton.hadoop.conf.dir</name>
-    <value>/etc/hadoop/conf</value>
-    <description>The path to the Hadoop configuration.</description>
-  </property>
-
-  <property>
-    <name>templeton.jar</name>
-    <value>/usr/lib/hcatalog/share/webhcat/svr/webhcat.jar</value>
-    <description>The path to the Templeton jar file.</description>
-  </property>
-
-  <property>
-    <name>templeton.libjars</name>
-    <value>/usr/lib/zookeeper/zookeeper.jar</value>
-    <description>Jars to add the the classpath.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.hadoop</name>
-    <value>/usr/bin/hadoop</value>
-    <description>The path to the Hadoop executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.archive</name>
-    <value>hdfs:///apps/webhcat/pig.tar.gz</value>
-    <description>The path to the Pig archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.path</name>
-    <value>pig.tar.gz/pig/bin/pig</value>
-    <description>The path to the Pig executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hcat</name>
-    <value>/usr/bin/hcat</value>
-    <description>The path to the hcatalog executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.archive</name>
-    <value>hdfs:///apps/webhcat/hive.tar.gz</value>
-    <description>The path to the Hive archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.path</name>
-    <value>hive.tar.gz/hive/bin/hive</value>
-    <description>The path to the Hive executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.properties</name>
-    <value></value>
-    <description>Properties to set when running hive.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.zookeeper.hosts</name>
-    <value></value>
-    <description>ZooKeeper servers, as comma separated host:port pairs</description>
-  </property>
-
-  <property>
-    <name>templeton.storage.class</name>
-    <value>org.apache.hcatalog.templeton.tool.ZooKeeperStorage</value>
-    <description>The class to use as storage</description>
-  </property>
-
-  <property>
-   <name>templeton.override.enabled</name>
-   <value>false</value>
-   <description>
-     Enable the override path in templeton.override.jars
-   </description>
- </property>
-
- <property>
-    <name>templeton.streaming.jar</name>
-    <value>hdfs:///apps/webhcat/hadoop-streaming.jar</value>
-    <description>The hdfs path to the Hadoop streaming jar file.</description>
-  </property> 
-
-  <property>
-    <name>templeton.exec.timeout</name>
-    <value>60000</value>
-    <description>Time out for templeton api</description>
-  </property>
-
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/WEBHCAT/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/WEBHCAT/metainfo.xml b/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/WEBHCAT/metainfo.xml
deleted file mode 100644
index f742d99..0000000
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/WEBHCAT/metainfo.xml
+++ /dev/null
@@ -1,102 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <comment>This is comment for WEBHCAT service</comment>
-      <version>0.11.0.2.0.5.0</version>
-
-      <components>
-        <component>
-          <name>WEBHCAT_SERVER</name>
-          <category>MASTER</category>
-          <cardinality>1</cardinality>
-          <dependencies>
-            <dependency>
-              <name>HDFS/HDFS_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>MAPREDUCE2/MAPREDUCE2_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
-              <scope>cluster</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-                <co-locate>WEBHCAT/WEBHCAT_SERVER</co-locate>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>ZOOKEEPER/ZOOKEEPER_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>YARN/YARN_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/webhcat_server.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-        </component>
-      </components>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hcatalog</name>
-            </package>
-            <package>
-              <name>webhcat-tar-hive</name>
-            </package>
-            <package>
-              <name>webhcat-tar-pig</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-      <configuration-dependencies>
-        <config-type>webhcat-site</config-type>
-      </configuration-dependencies>
-    </service>
-  </services>
-</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/test/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml b/ambari-server/src/test/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml
deleted file mode 100644
index d73e67e..0000000
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml
+++ /dev/null
@@ -1,28 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <displayName>WebHCat</displayName>
-      <comment>This is comment for WEBHCAT service</comment>
-      <version>0.11.0.2.0.5.0</version>
-    </service>
-  </services>
-</metainfo>


[19/27] AMBARI-7296. HCatalog and WebHCat services should not be managed as separate services (they should be part of the Hive service) (jaimin)

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py
deleted file mode 100644
index 174c92c..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py
+++ /dev/null
@@ -1,78 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management import *
-import status_params
-
-# server configurations
-config = Script.get_config()
-tmp_dir = Script.get_tmp_dir()
-
-hcat_user = config['configurations']['hive-env']['hcat_user']
-webhcat_user = config['configurations']['hive-env']['webhcat_user']
-webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
-
-config_dir = '/etc/hcatalog/conf'
-
-templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
-templeton_pid_dir = status_params.templeton_pid_dir
-
-pid_file = status_params.pid_file
-
-hadoop_conf_dir = config['configurations']['webhcat-site']['templeton.hadoop.conf.dir']
-templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
-
-hadoop_home = '/usr'
-user_group = config['configurations']['cluster-env']['user_group']
-
-webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
-
-webhcat_apps_dir = "/apps/webhcat"
-smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-smokeuser = config['configurations']['cluster-env']['smokeuser']
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-
-#hdfs directories
-webhcat_apps_dir = "/apps/webhcat"
-hcat_hdfs_user_dir = format("/user/{hcat_user}")
-hcat_hdfs_user_mode = 0755
-webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
-webhcat_hdfs_user_mode = 0755
-#for create_hdfs_directory
-hostname = config["hostname"]
-security_param = "true" if security_enabled else "false"
-hadoop_conf_dir = "/etc/hadoop/conf"
-hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
-  security_enabled = security_enabled,
-  keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local
-)
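
The params module above pre-binds the cluster-wide HdfsDirectory arguments with functools.partial, so call sites pass only what varies per directory. A standalone sketch of that idiom, with a plain function standing in for the real HdfsDirectory resource:

import functools

def hdfs_directory(path, conf_dir, hdfs_user, security_enabled,
                   keytab, kinit_path_local, **kwargs):
  # Stand-in for the real resource; only shows the resulting call shape.
  print('HdfsDirectory(%r) conf_dir=%s user=%s extra=%r'
        % (path, conf_dir, hdfs_user, kwargs))

# Bind once the arguments that are identical for every call in a deployment.
HdfsDirectory = functools.partial(
  hdfs_directory,
  conf_dir='/etc/hadoop/conf',
  hdfs_user='hdfs',
  security_enabled=False,
  keytab='none',
  kinit_path_local='/usr/bin/kinit',
)

# Call sites now read like the script code above.
HdfsDirectory('/apps/webhcat', owner='hcat', mode=0755, action=['create_delayed'])
HdfsDirectory(None, action=['create'])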

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/service_check.py
deleted file mode 100644
index b6101ac..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/service_check.py
+++ /dev/null
@@ -1,44 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-from resource_management import *
-
-class WebHCatServiceCheck(Script):
-  def service_check(self, env):
-    import params
-    env.set_params(params)
-
-    File(format("{tmp_dir}/templetonSmoke.sh"),
-         content= StaticFile('templetonSmoke.sh'),
-         mode=0755
-    )
-
-    cmd = format("sh {tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {smokeuser_keytab}"
-                 " {security_param} {kinit_path_local}",
-                 smokeuser_keytab=params.smoke_user_keytab if params.security_enabled else "no_keytab")
-
-    Execute(cmd,
-            tries=3,
-            try_sleep=5,
-            path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
-            logoutput=True)
-
-if __name__ == "__main__":
-  WebHCatServiceCheck().execute()
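
One detail worth noting in the deleted check: the keytab argument is swapped at format time, so the same command template serves both secure and insecure clusters ("no_keytab" being the literal placeholder the smoke script expects). A rough standalone equivalent using plain str.format instead of resource_management's format helper:

def build_smoke_cmd(tmp_dir, host, smokeuser, smoke_user_keytab,
                    security_enabled, kinit_path_local):
  # Pass the real keytab only when security is on, else the placeholder.
  keytab = smoke_user_keytab if security_enabled else 'no_keytab'
  security_param = 'true' if security_enabled else 'false'
  return ('sh {0}/templetonSmoke.sh {1} {2} {3} {4} {5}'
          .format(tmp_dir, host, smokeuser, keytab, security_param,
                  kinit_path_local))

print(build_smoke_cmd('/tmp', 'c6402.ambari.apache.org', 'ambari-qa',
                      '/etc/security/keytabs/smokeuser.headless.keytab',
                      True, '/usr/bin/kinit'))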

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/status_params.py
deleted file mode 100644
index 23823e6..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/status_params.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-config = Script.get_config()
-
-templeton_pid_dir = config['configurations']['hive-env']['hcat_pid_dir']
-pid_file = format('{templeton_pid_dir}/webhcat.pid')

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat.py
deleted file mode 100644
index 037cdb5..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat.py
+++ /dev/null
@@ -1,107 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-from resource_management import *
-
-
-def webhcat():
-  import params
-
-  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
-    params.HdfsDirectory(params.hcat_hdfs_user_dir,
-                         action="create_delayed",
-                         owner=params.hcat_user,
-                         mode=params.hcat_hdfs_user_mode
-    )
-  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
-                       action="create_delayed",
-                       owner=params.webhcat_user,
-                       mode=params.webhcat_hdfs_user_mode
-  )
-  params.HdfsDirectory(params.webhcat_apps_dir,
-                       action="create_delayed",
-                       owner=params.webhcat_user,
-                       mode=0755
-  )
-  params.HdfsDirectory(None, action="create")
-  Directory(params.templeton_pid_dir,
-            owner=params.webhcat_user,
-            mode=0755,
-            group=params.user_group,
-            recursive=True)
-
-  Directory(params.templeton_log_dir,
-            owner=params.webhcat_user,
-            mode=0755,
-            group=params.user_group,
-            recursive=True)
-
-  Directory(params.config_dir,
-            owner=params.webhcat_user,
-            group=params.user_group)
-
-  XmlConfig("webhcat-site.xml",
-            conf_dir=params.config_dir,
-            configurations=params.config['configurations']['webhcat-site'],
-            configuration_attributes=params.config['configuration_attributes']['webhcat-site'],
-            owner=params.webhcat_user,
-            group=params.user_group,
-  )
-
-  File(format("{config_dir}/webhcat-env.sh"),
-       owner=params.webhcat_user,
-       group=params.user_group,
-       content=InlineTemplate(params.webhcat_env_sh_template)
-  )
-
-  if params.security_enabled:
-    kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_user};")
-  else:
-    kinit_if_needed = ""
-
-  if kinit_if_needed:
-    Execute(kinit_if_needed,
-            user=params.webhcat_user,
-            path='/bin'
-    )
-
-  CopyFromLocal('/usr/lib/hadoop/contrib/streaming/hadoop-streaming*.jar',
-                owner=params.webhcat_user,
-                mode=0755,
-                dest_dir=params.webhcat_apps_dir,
-                kinnit_if_needed=kinit_if_needed,
-                hdfs_user=params.hdfs_user
-  )
-
-  CopyFromLocal('/usr/share/HDP-webhcat/pig.tar.gz',
-                owner=params.webhcat_user,
-                mode=0755,
-                dest_dir=params.webhcat_apps_dir,
-                kinnit_if_needed=kinit_if_needed,
-                hdfs_user=params.hdfs_user
-  )
-
-  CopyFromLocal('/usr/share/HDP-webhcat/hive.tar.gz',
-                owner=params.webhcat_user,
-                mode=0755,
-                dest_dir=params.webhcat_apps_dir,
-                kinnit_if_needed=kinit_if_needed,
-                hdfs_user=params.hdfs_user
-  )
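
The create_delayed/create split in webhcat() above is a batching idiom: every HdfsDirectory call with action create_delayed only queues a directory, and the final call with path None and action create flushes the whole queue in one pass. A toy sketch of the two-phase pattern; the queue and print here are hypothetical, the real resource talks to HDFS:

_pending = []

def hdfs_directory(path, action, **attrs):
  if action == ['create_delayed']:
    # Phase 1: remember the directory and its attributes, touch nothing yet.
    _pending.append((path, attrs))
  elif action == ['create']:
    # Phase 2: create everything queued so far in a single batch.
    for queued_path, queued_attrs in _pending:
      print('creating %s with %r' % (queued_path, queued_attrs))
    del _pending[:]

hdfs_directory('/user/hcat', ['create_delayed'], owner='hcat', mode=0755)
hdfs_directory('/apps/webhcat', ['create_delayed'], owner='hcat', mode=0755)
hdfs_directory(None, ['create'])  # flush the batch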

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat_server.py
deleted file mode 100644
index 2111fa4..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat_server.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-import sys
-from resource_management import *
-
-from webhcat import webhcat
-from webhcat_service import webhcat_service
-
-class WebHCatServer(Script):
-  def install(self, env):
-    self.install_packages(env)
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    webhcat()
-
-  def start(self, env):
-    import params
-    env.set_params(params)
-    self.configure(env) # FOR SECURITY
-    webhcat_service(action = 'start')
-
-  def stop(self, env):
-    import params
-    env.set_params(params)
-
-    webhcat_service(action = 'stop')
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-    check_process_status(status_params.pid_file)
-
-if __name__ == "__main__":
-  WebHCatServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat_service.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat_service.py
deleted file mode 100644
index 57ec17a..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat_service.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-from resource_management import *
-
-def webhcat_service(action='start'):
-  import params
-
-  cmd = format('env HADOOP_HOME={hadoop_home} /usr/lib/hcatalog/sbin/webhcat_server.sh')
-
-  if action == 'start':
-    demon_cmd = format('{cmd} start')
-    no_op_test = format('ls {pid_file} >/dev/null 2>&1 && ps `cat {pid_file}` >/dev/null 2>&1')
-    Execute(demon_cmd,
-            user=params.webhcat_user,
-            not_if=no_op_test
-    )
-  elif action == 'stop':
-    demon_cmd = format('{cmd} stop')
-    Execute(demon_cmd,
-            user=params.webhcat_user
-    )
-    Execute(format('rm -f {pid_file}'))
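
The not_if guard in the deleted start branch is what makes the Execute idempotent: the daemon is launched only when the pid-file probe fails. A hedged standalone sketch of the same guard outside resource_management:

import subprocess

def start_if_not_running(start_cmd, pid_file):
  # Same probe the Execute resource used as not_if: the pid file exists
  # and the process it names is still alive.
  probe = 'ls {0} >/dev/null 2>&1 && ps `cat {0}` >/dev/null 2>&1'.format(pid_file)
  if subprocess.call(probe, shell=True) != 0:
    subprocess.check_call(start_cmd, shell=True)

start_if_not_running(
  'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh start',
  '/var/run/webhcat/webhcat.pid')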

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/1.3/role_command_order.json
index a05324f..5a75a8e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3/role_command_order.json
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3/role_command_order.json
@@ -20,10 +20,8 @@
         "WEBHCAT_SERVER-START"],
     "MAPREDUCE_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
     "OOZIE_SERVICE_CHECK-SERVICE_CHECK": ["OOZIE_SERVER-START"],
-    "WEBHCAT_SERVICE_CHECK-SERVICE_CHECK": ["WEBHCAT_SERVER-START"],
     "HBASE_SERVICE_CHECK-SERVICE_CHECK": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START"],
-    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START"],
-    "HCAT_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START"],
+    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START","WEBHCAT_SERVER-START"],
     "PIG_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
     "SQOOP_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
     "ZOOKEEPER_SERVICE_CHECK-SERVICE_CHECK": ["ZOOKEEPER_SERVER-START"],

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3/services/HIVE/metainfo.xml
index bf0daf5..ca181f3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3/services/HIVE/metainfo.xml
@@ -23,10 +23,5 @@
       <comment>Data warehouse system for ad-hoc queries &amp; analysis of large datasets and table &amp; storage management service</comment>
       <version>0.11.0.1.3</version>
     </service>
-    <service>
-      <name>HCATALOG</name>
-      <comment>HCATALOG</comment>
-      <version>0.11.0.1.3</version>
-    </service>
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/role_command_order.json
index 6b32edf..a92fa1d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/role_command_order.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/role_command_order.json
@@ -21,10 +21,8 @@
         "WEBHCAT_SERVER-START", "FLUME_HANDLER-START"],
     "MAPREDUCE_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
     "OOZIE_SERVICE_CHECK-SERVICE_CHECK": ["OOZIE_SERVER-START", "MAPREDUCE2_SERVICE_CHECK-SERVICE_CHECK"],
-    "WEBHCAT_SERVICE_CHECK-SERVICE_CHECK": ["WEBHCAT_SERVER-START"],
     "HBASE_SERVICE_CHECK-SERVICE_CHECK": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START"],
-    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START"],
-    "HCAT_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START"],
+    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START", "WEBHCAT_SERVER-START"],
     "PIG_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
     "SQOOP_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
     "ZOOKEEPER_SERVICE_CHECK-SERVICE_CHECK": ["ZOOKEEPER_SERVER-START"],

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/configuration/webhcat-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/configuration/webhcat-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/configuration/webhcat-env.xml
new file mode 100644
index 0000000..14a473f
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/configuration/webhcat-env.xml
@@ -0,0 +1,54 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration>
+  <!-- webhcat-env.sh -->
+  <property>
+    <name>content</name>
+    <description>webhcat-env.sh content</description>
+    <value>
+# The file containing the running pid
+PID_FILE={{webhcat_pid_file}}
+
+TEMPLETON_LOG_DIR={{templeton_log_dir}}/
+
+
+WEBHCAT_LOG_DIR={{templeton_log_dir}}/
+
+# The console error log
+ERROR_LOG={{templeton_log_dir}}/webhcat-console-error.log
+
+# The console log
+CONSOLE_LOG={{templeton_log_dir}}/webhcat-console.log
+
+#TEMPLETON_JAR=templeton_jar_name
+
+#HADOOP_PREFIX=hadoop_prefix
+
+#HCAT_PREFIX=hive_prefix
+
+# Set HADOOP_HOME to point to a specific hadoop install directory
+export HADOOP_HOME={{hadoop_home}}
+    </value>
+  </property>
+  
+</configuration>
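
The {{...}} placeholders in the content property above are resolved at deploy time: the scripts wrap this value in InlineTemplate, which renders it against the params module. A rough equivalent using plain Jinja2 (direct Jinja2 use here is an assumption for illustration; InlineTemplate layers its own lookup rules on top):

from jinja2 import Template

webhcat_env = Template(
  'PID_FILE={{webhcat_pid_file}}\n'
  'TEMPLETON_LOG_DIR={{templeton_log_dir}}/\n'
  'export HADOOP_HOME={{hadoop_home}}\n'
)

print(webhcat_env.render(webhcat_pid_file='/var/run/webhcat/webhcat.pid',
                         templeton_log_dir='/var/log/webhcat',
                         hadoop_home='/usr'))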

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/configuration/webhcat-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/configuration/webhcat-site.xml
new file mode 100644
index 0000000..0523dab
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/configuration/webhcat-site.xml
@@ -0,0 +1,138 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- 
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+<!-- The default settings for Templeton. -->
+<!-- Edit templeton-site.xml to change settings for your local -->
+<!-- install. -->
+
+<configuration supports_final="true">
+
+  <property>
+    <name>templeton.port</name>
+    <value>50111</value>
+    <description>The HTTP port for the main server.</description>
+  </property>
+
+  <property>
+    <name>templeton.hadoop.conf.dir</name>
+    <value>/etc/hadoop/conf</value>
+    <description>The path to the Hadoop configuration.</description>
+  </property>
+
+  <property>
+    <name>templeton.jar</name>
+    <value>/usr/lib/hcatalog/share/webhcat/svr/webhcat.jar</value>
+    <description>The path to the Templeton jar file.</description>
+  </property>
+
+  <property>
+    <name>templeton.libjars</name>
+    <value>/usr/lib/zookeeper/zookeeper.jar</value>
+    <description>Jars to add to the classpath.</description>
+  </property>
+
+
+  <property>
+    <name>templeton.hadoop</name>
+    <value>/usr/bin/hadoop</value>
+    <description>The path to the Hadoop executable.</description>
+  </property>
+
+  <property>
+    <name>templeton.pig.archive</name>
+    <value>hdfs:///apps/webhcat/pig.tar.gz</value>
+    <description>The path to the Pig archive.</description>
+  </property>
+
+  <property>
+    <name>templeton.pig.path</name>
+    <value>pig.tar.gz/pig/bin/pig</value>
+    <description>The path to the Pig executable.</description>
+  </property>
+
+  <property>
+    <name>templeton.hcat</name>
+    <value>/usr/bin/hcat</value>
+    <description>The path to the hcatalog executable.</description>
+  </property>
+
+  <property>
+    <name>templeton.hive.archive</name>
+    <value>hdfs:///apps/webhcat/hive.tar.gz</value>
+    <description>The path to the Hive archive.</description>
+  </property>
+
+  <property>
+    <name>templeton.hive.home</name>
+    <value>hive.tar.gz/hive</value>
+    <description>The path to the Hive home within the tar. Has no effect if templeton.hive.archive is not set.</description>
+  </property>
+
+  <property>
+    <name>templeton.hcat.home</name>
+    <value>hive.tar.gz/hive/hcatalog</value>
+    <description>The path to the HCat home within the tar. Has no effect if templeton.hive.archive is not set.</description>
+  </property>
+
+  <property>
+    <name>templeton.hive.path</name>
+    <value>hive.tar.gz/hive/bin/hive</value>
+    <description>The path to the Hive executable.</description>
+  </property>
+
+  <property>
+    <name>templeton.hive.properties</name>
+    <value>hive.metastore.local=false, hive.metastore.uris=thrift://localhost:9933, hive.metastore.sasl.enabled=false</value>
+    <description>Properties to set when running hive.</description>
+  </property>
+
+
+  <property>
+    <name>templeton.zookeeper.hosts</name>
+    <value>localhost:2181</value>
+    <description>ZooKeeper servers, as comma-separated host:port pairs</description>
+  </property>
+
+  <property>
+    <name>templeton.storage.class</name>
+    <value>org.apache.hive.hcatalog.templeton.tool.ZooKeeperStorage</value>
+    <description>The class to use as storage</description>
+  </property>
+
+  <property>
+    <name>templeton.override.enabled</name>
+    <value>false</value>
+    <description>
+      Enable the override path in templeton.override.jars
+    </description>
+  </property>
+
+  <property>
+    <name>templeton.streaming.jar</name>
+    <value>hdfs:///apps/webhcat/hadoop-streaming.jar</value>
+    <description>The hdfs path to the Hadoop streaming jar file.</description>
+  </property>
+
+  <property>
+    <name>templeton.exec.timeout</name>
+    <value>60000</value>
+    <description>Timeout for the Templeton API.</description>
+  </property>
+
+</configuration>
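
Each <property> above is a flat name/value pair, so the whole file maps naturally onto a dict. A short stdlib-only sketch of reading it back, e.g. to inspect templeton.port or templeton.zookeeper.hosts (the path is illustrative):

    import xml.etree.ElementTree as ET

    def load_properties(path):
        # collect each <property><name>/<value> pair into a dict
        root = ET.parse(path).getroot()
        return {p.findtext('name'): p.findtext('value')
                for p in root.findall('property')}

    props = load_properties('webhcat-site.xml')
    print(props['templeton.port'], props['templeton.zookeeper.hosts'])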

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/metainfo.xml
index 31bfe44..483a66f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/metainfo.xml
@@ -75,7 +75,55 @@
             <scriptType>PYTHON</scriptType>
           </commandScript>
         </component>
-
+        <component>
+          <name>WEBHCAT_SERVER</name>
+          <displayName>WebHCat Server</displayName>
+          <category>MASTER</category>
+          <cardinality>1</cardinality>
+          <dependencies>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>MAPREDUCE2/MAPREDUCE2_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
+              <scope>cluster</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+                <co-locate>HIVE/WEBHCAT_SERVER</co-locate>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>ZOOKEEPER/ZOOKEEPER_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>YARN/YARN_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <commandScript>
+            <script>scripts/webhcat_server.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+        </component>
         <component>
           <name>MYSQL_SERVER</name>
           <displayName>MySQL Server</displayName>
@@ -119,6 +167,37 @@
             </configFile>                         
           </configFiles>
         </component>
+        <component>
+          <name>HCAT</name>
+          <displayName>HCat</displayName>
+          <category>CLIENT</category>
+          <commandScript>
+            <script>scripts/hcat_client.py</script>
+            <scriptType>PYTHON</scriptType>
+          </commandScript>
+          <configFiles>
+            <configFile>
+              <type>xml</type>
+              <fileName>hive-site.xml</fileName>
+              <dictionaryName>hive-site</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>hive-env.sh</fileName>
+              <dictionaryName>hive-env</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>hive-log4j.properties</fileName>
+              <dictionaryName>hive-log4j</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>hive-exec-log4j.properties</fileName>
+              <dictionaryName>hive-exec-log4j</dictionaryName>
+            </configFile>
+          </configFiles>
+        </component>
       </components>
 
       <osSpecifics>
@@ -129,6 +208,15 @@
               <name>hive</name>
             </package>
             <package>
+              <name>hcatalog</name>
+            </package>
+            <package>
+              <name>webhcat-tar-hive</name>
+            </package>
+            <package>
+              <name>webhcat-tar-pig</name>
+            </package>
+            <package>
               <name>mysql-connector-java</name>
             </package>
           </packages>
@@ -176,80 +264,9 @@
         <config-type>hive-log4j</config-type>
         <config-type>hive-exec-log4j</config-type>
         <config-type>hive-env</config-type>
+        <config-type>webhcat-site</config-type>
+        <config-type>webhcat-env</config-type>
       </configuration-dependencies>
     </service>
-
-    <service>
-      <name>HCATALOG</name>
-      <displayName>HCatalog</displayName>
-      <comment>A table and storage management layer for Hadoop that enables users with different data processing tools
-        to more easily read and write data on the grid.
-      </comment>
-      <version>0.12.0.2.0.6.0</version>
-      <components>
-        <component>
-          <name>HCAT</name>
-          <displayName>HCat</displayName>
-          <category>CLIENT</category>
-          <commandScript>
-            <script>scripts/hcat_client.py</script>
-            <scriptType>PYTHON</scriptType>
-          </commandScript>
-          <configFiles>
-            <configFile>
-              <type>xml</type>
-              <fileName>hive-site.xml</fileName>
-              <dictionaryName>hive-site</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>hive-env.sh</fileName>
-              <dictionaryName>hive-env</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>hive-log4j.properties</fileName>
-              <dictionaryName>hive-log4j</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>hive-exec-log4j.properties</fileName>
-              <dictionaryName>hive-exec-log4j</dictionaryName>
-            </configFile>
-          </configFiles>
-        </component>
-      </components>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hcatalog</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-      
-      <requiredServices>
-        <service>HIVE</service>
-      </requiredServices>
-      
-      <configuration-dependencies>
-        <config-type>hive-site</config-type>
-        <config-type>hive-env</config-type>
-      </configuration-dependencies>
-      <excluded-config-types>
-        <config-type>hive-env</config-type>
-        <config-type>hive-site</config-type>
-        <config-type>hive-exec-log4j</config-type>
-        <config-type>hive-log4j</config-type>
-      </excluded-config-types>
-    </service>
-
   </services>
 </metainfo>
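
The net effect of these metainfo.xml edits: WEBHCAT_SERVER and HCAT become components of the HIVE service, and the standalone HCATALOG service definition disappears. A quick stdlib sanity check over the merged file (path shortened for illustration):

    import xml.etree.ElementTree as ET

    root = ET.parse('services/HIVE/metainfo.xml').getroot()
    components = [c.findtext('name') for c in root.iter('component')]
    assert 'WEBHCAT_SERVER' in components and 'HCAT' in components
    services = [s.findtext('name') for s in root.find('services').findall('service')]
    assert services == ['HIVE']  # no separate HCATALOG service remains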

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/files/templetonSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/files/templetonSmoke.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/files/templetonSmoke.sh
new file mode 100644
index 0000000..2d07b8b
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/files/templetonSmoke.sh
@@ -0,0 +1,96 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+export ttonhost=$1
+export smoke_test_user=$2
+export smoke_user_keytab=$3
+export security_enabled=$4
+export kinit_path_local=$5
+export ttonurl="http://${ttonhost}:50111/templeton/v1"
+
+if [[ $security_enabled == "true" ]]; then
+  kinitcmd="${kinit_path_local}  -kt ${smoke_user_keytab} ${smoke_test_user}; "
+else
+  kinitcmd=""
+fi
+
+export no_proxy=$ttonhost
+cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>'    $ttonurl/status 2>&1"
+retVal=`su - ${smoke_test_user} -c "$cmd"`
+httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
+
+if [[ "$httpExitCode" -ne "200" ]] ; then
+  echo "Templeton Smoke Test (status cmd): Failed. : $retVal"
+  export TEMPLETON_EXIT_CODE=1
+  exit 1
+fi
+
+# NOTE: this early exit means only the status check above actually runs; the
+# ddl and pig checks below are currently unreachable.
+exit 0
+
+#try an hcat ddl command
+echo "user.name=${smoke_test_user}&exec=show databases;" > /tmp/show_db.post.txt
+cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>' -d  \@/tmp/show_db.post.txt  $ttonurl/ddl 2>&1"
+retVal=`su - ${smoke_test_user} -c "$cmd"`
+httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
+
+if [[ "$httpExitCode" -ne "200" ]] ; then
+  echo "Templeton Smoke Test (ddl cmd): Failed. : $retVal"
+  export TEMPLETON_EXIT_CODE=1
+  exit  1
+fi
+
+# Pig smoke test is skipped when security is enabled
+if [[ $security_enabled == "true" ]]; then
+  echo "Templeton Pig Smoke Tests not run in secure mode"
+  exit 0
+fi
+
+#try pig query
+outname=${smoke_test_user}.`date +"%M%d%y"`.$$;
+ttonTestOutput="/tmp/idtest.${outname}.out";
+ttonTestInput="/tmp/idtest.${outname}.in";
+ttonTestScript="idtest.${outname}.pig"
+
+echo "A = load '$ttonTestInput' using PigStorage(':');"  > /tmp/$ttonTestScript
+echo "B = foreach A generate \$0 as id; " >> /tmp/$ttonTestScript
+echo "store B into '$ttonTestOutput';" >> /tmp/$ttonTestScript
+
+#copy pig script to hdfs
+su - ${smoke_test_user} -c "hadoop dfs -copyFromLocal /tmp/$ttonTestScript /tmp/$ttonTestScript"
+
+#copy input file to hdfs
+su - ${smoke_test_user} -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput"
+
+#create, copy post args file
+echo -n "user.name=${smoke_test_user}&file=/tmp/$ttonTestScript" > /tmp/pig_post.txt
+
+#submit pig query
+cmd="curl -s -w 'http_code <%{http_code}>' -d  \@${destdir}/pig_post.txt  $ttonurl/pig 2>&1"
+retVal=`su - ${smoke_test_user} -c "$cmd"`
+httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
+if [[ "$httpExitCode" -ne "200" ]] ; then
+  echo "Templeton Smoke Test (pig cmd): Failed. : $retVal"
+  export TEMPLETON_EXIT_CODE=1
+  exit 1
+fi
+
+exit 0
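
The status check above reduces to an HTTP GET against the Templeton REST endpoint and a test for a 200 response. A rough Python 3 equivalent of that probe (the host is an assumption; the port matches the script's hard-coded 50111):

    import urllib.request

    def templeton_is_up(host, port=50111):
        url = "http://%s:%d/templeton/v1/status" % (host, port)
        try:
            with urllib.request.urlopen(url, timeout=10) as resp:
                return resp.status == 200  # 200 means the server answered
        except OSError:
            return False

    print(templeton_is_up("localhost"))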

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
index a38c12a..fd8945f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
@@ -36,6 +36,7 @@ if rpm_version is not None:
   hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
   hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
   hadoop_home = format('/usr/hdp/{rpm_version}/hadoop')
+  hadoop_streeming_jars = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/hadoop-streaming-*.jar")
   hive_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive/conf')
   hive_client_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive/conf')
   hive_server_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive/conf.server')
@@ -46,16 +47,21 @@ if rpm_version is not None:
 
   if str(hdp_stack_version).startswith('2.0'):
     hcat_conf_dir = format('/usr/hdp/{rpm_version}/etc/hcatalog/conf')
+    config_dir = format('/usr/hdp/{rpm_version}/etc/hcatalog/conf')
     hcat_lib = format('/usr/hdp/{rpm_version}/hive/hcatalog/share/hcatalog')
+    webhcat_bin_dir = format('/usr/hdp/{rpm_version}/hive/hcatalog/sbin')
   # for newer versions
   else:
     hcat_conf_dir = format('/usr/hdp/{rpm_version}/etc/hive-hcatalog/conf')
+    config_dir = format('/usr/hdp/{rpm_version}/etc/hive-webhcat/conf')
     hcat_lib = format('/usr/hdp/{rpm_version}/hive/hive-hcatalog/share/hcatalog')
+    webhcat_bin_dir = format('/usr/hdp/{rpm_version}/hive/hive-hcatalog/sbin')
 
 else:
   hadoop_conf_dir = "/etc/hadoop/conf"
   hadoop_bin_dir = "/usr/bin"
   hadoop_home = '/usr'
+  hadoop_streeming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
   hive_conf_dir = "/etc/hive/conf"
   hive_bin = '/usr/lib/hive/bin'
   hive_lib = '/usr/lib/hive/lib/'
@@ -66,11 +72,15 @@ else:
 
   if str(hdp_stack_version).startswith('2.0'):
     hcat_conf_dir = '/etc/hcatalog/conf'
+    config_dir = '/etc/hcatalog/conf'
     hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
+    webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
   # for newer versions
   else:
     hcat_conf_dir = '/etc/hive-hcatalog/conf'
+    config_dir = '/etc/hive-webhcat/conf'
     hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
+    webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
 
 execute_path = os.environ['PATH'] + os.pathsep + hive_bin
 hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
@@ -202,9 +212,7 @@ hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.wareho
 #for create_hdfs_directory
 hostname = config["hostname"]
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 
 # Tez libraries
 tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
@@ -228,6 +236,38 @@ if os.path.exists(mysql_jdbc_driver_jar):
 else:  
   hive_exclude_packages = []
 
+########################################################
+########### WebHCat related params #####################
+########################################################
+
+# NOTE: this block re-derives config_dir/webhcat_bin_dir from stack_version
+# alone, overwriting the rpm_version-aware values computed earlier in this file.
+if str(config['hostLevelParams']['stack_version']).startswith('2.0'):
+  config_dir = '/etc/hcatalog/conf'
+  webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
+# for newer versions
+else:
+  config_dir = '/etc/hive-webhcat/conf'
+  webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
+
+webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
+templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
+templeton_pid_dir = status_params.hcat_pid_dir
+
+webhcat_pid_file = status_params.webhcat_pid_file
+
+templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
+
+
+webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
+
+webhcat_apps_dir = "/apps/webhcat"
+
+hcat_hdfs_user_dir = format("/user/{hcat_user}")
+hcat_hdfs_user_mode = 0755
+webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
+webhcat_hdfs_user_mode = 0755
+# string form of security_enabled, passed as an argument to templetonSmoke.sh
+security_param = "true" if security_enabled else "false"
+
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code
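
The HdfsDirectory partial referenced here pre-binds the arguments that are identical for every call, so call sites (as in webhcat.py below) pass only a path and an action. A self-contained sketch of the same pattern; the function body and values are illustrative, not the real resource:

    import functools

    def hdfs_directory(path, action, conf_dir, hdfs_user, bin_dir):
        print("%s %s as %s (conf=%s, bin=%s)" % (action, path, hdfs_user, conf_dir, bin_dir))

    # bind the cluster-wide arguments once...
    HdfsDirectory = functools.partial(hdfs_directory,
                                      conf_dir='/etc/hadoop/conf',
                                      hdfs_user='hdfs',
                                      bin_dir='/usr/bin')
    # ...so each call site stays short
    HdfsDirectory('/apps/webhcat', action='create_delayed')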

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/service_check.py
index 09ba1bf..d7b10eb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/service_check.py
@@ -23,6 +23,7 @@ import socket
 import sys
 
 from hcat_service_check import hcat_service_check
+from webhcat_service_check import webhcat_service_check
 
 class HiveServiceCheck(Script):
   def service_check(self, env):
@@ -42,6 +43,7 @@ class HiveServiceCheck(Script):
       sys.exit(1)
 
     hcat_service_check()
+    webhcat_service_check()
 
 if __name__ == "__main__":
   HiveServiceCheck().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/status_params.py
index d03d76b..e6f2514 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/status_params.py
@@ -28,6 +28,7 @@ hive_pid = 'hive-server.pid'
 hive_metastore_pid = 'hive.pid'
 
 hcat_pid_dir = config['configurations']['hive-env']['hcat_pid_dir'] #hcat_pid_dir
+webhcat_pid_file = format('{hcat_pid_dir}/webhcat.pid')
 
 if System.get_instance().os_family == "suse" or System.get_instance().os_family == "ubuntu":
   daemon_name = 'mysql'
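
webhcat_pid_file feeds check_process_status() in webhcat_server.py. Conceptually that check reads the pid and asks the kernel whether the process exists; a stand-alone sketch of the idea (not the resource_management implementation):

    import os

    def process_is_running(pid_file):
        try:
            with open(pid_file) as f:
                pid = int(f.read().strip())
            os.kill(pid, 0)  # signal 0 only tests existence/permission
            return True
        except (OSError, IOError, ValueError):
            return False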

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py
new file mode 100644
index 0000000..c56ae5f
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py
@@ -0,0 +1,112 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+from resource_management import *
+import sys
+
+
+def webhcat():
+  import params
+
+  params.HdfsDirectory(params.webhcat_apps_dir,
+                       action="create_delayed",
+                       owner=params.webhcat_user,
+                       mode=0755
+  )
+  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
+    params.HdfsDirectory(params.hcat_hdfs_user_dir,
+                         action="create_delayed",
+                         owner=params.hcat_user,
+                         mode=params.hcat_hdfs_user_mode
+    )
+  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
+                       action="create_delayed",
+                       owner=params.webhcat_user,
+                       mode=params.webhcat_hdfs_user_mode
+  )
+  params.HdfsDirectory(None, action="create")
+
+  Directory(params.templeton_pid_dir,
+            owner=params.webhcat_user,
+            mode=0755,
+            group=params.user_group,
+            recursive=True)
+
+  Directory(params.templeton_log_dir,
+            owner=params.webhcat_user,
+            mode=0755,
+            group=params.user_group,
+            recursive=True)
+
+  Directory(params.config_dir,
+            owner=params.webhcat_user,
+            group=params.user_group)
+
+  XmlConfig("webhcat-site.xml",
+            conf_dir=params.config_dir,
+            configurations=params.config['configurations']['webhcat-site'],
+            configuration_attributes=params.config['configuration_attributes']['webhcat-site'],
+            owner=params.webhcat_user,
+            group=params.user_group,
+  )
+
+  File(format("{config_dir}/webhcat-env.sh"),
+       owner=params.webhcat_user,
+       group=params.user_group,
+       content=InlineTemplate(params.webhcat_env_sh_template)
+  )
+
+  if params.security_enabled:
+    kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
+  else:
+    kinit_if_needed = ""
+
+  if kinit_if_needed:
+    Execute(kinit_if_needed,
+            user=params.webhcat_user,
+            path='/bin'
+    )
+
+  CopyFromLocal(params.hadoop_streeming_jars,
+                owner=params.webhcat_user,
+                mode=0755,
+                dest_dir=params.webhcat_apps_dir,
+                kinnit_if_needed=kinit_if_needed,
+                hdfs_user=params.hdfs_user,
+                hadoop_conf_dir=params.hadoop_conf_dir
+  )
+
+  CopyFromLocal('/usr/share/HDP-webhcat/pig.tar.gz',
+                owner=params.webhcat_user,
+                mode=0755,
+                dest_dir=params.webhcat_apps_dir,
+                kinnit_if_needed=kinit_if_needed,
+                hdfs_user=params.hdfs_user,
+                hadoop_conf_dir=params.hadoop_conf_dir
+  )
+
+  CopyFromLocal('/usr/share/HDP-webhcat/hive.tar.gz',
+                owner=params.webhcat_user,
+                mode=0755,
+                dest_dir=params.webhcat_apps_dir,
+                kinnit_if_needed=kinit_if_needed,
+                hdfs_user=params.hdfs_user,
+                hadoop_conf_dir=params.hadoop_conf_dir
+  )
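
The create_delayed/create pairing above queues several HDFS directories and materializes them in a single pass, avoiding one hadoop fs round trip per directory. A rough sketch of that batching idea (the real HdfsDirectory resource also handles ownership, modes and kerberos):

    class DirectoryBatch(object):
        def __init__(self):
            self.pending = []

        def create_delayed(self, path, owner, mode):
            self.pending.append((path, owner, mode))  # just remember it

        def create(self):
            for path, owner, mode in self.pending:   # flush everything at once
                print("mkdir %s owner=%s mode=%o" % (path, owner, mode))
            self.pending = []

    batch = DirectoryBatch()
    batch.create_delayed("/apps/webhcat", "hcat", 0o755)
    batch.create_delayed("/user/hcat", "hcat", 0o755)
    batch.create()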

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_server.py
new file mode 100644
index 0000000..088cb41
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_server.py
@@ -0,0 +1,53 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+import sys
+from resource_management import *
+
+from webhcat import webhcat
+from webhcat_service import webhcat_service
+
+class WebHCatServer(Script):
+  def install(self, env):
+    self.install_packages(env)
+  def configure(self, env):
+    import params
+    env.set_params(params)
+    webhcat()
+
+  def start(self, env):
+    import params
+    env.set_params(params)
+    self.configure(env) # FOR SECURITY
+    webhcat_service(action = 'start')
+
+  def stop(self, env):
+    import params
+    env.set_params(params)
+
+    webhcat_service(action = 'stop')
+
+  def status(self, env):
+    import status_params
+    env.set_params(status_params)
+    check_process_status(status_params.webhcat_pid_file)
+
+if __name__ == "__main__":
+  WebHCatServer().execute()
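
WebHCatServer follows the usual Ambari Script contract: one class, one method per lifecycle command, dispatched by execute(). A toy sketch of that dispatch (the real Script also parses the command JSON handed over by the agent):

    class MiniScript(object):
        def install(self): print("installing")
        def start(self): print("starting")
        def stop(self): print("stopping")
        def status(self): print("checking status")

        def execute(self, command):
            getattr(self, command)()  # 'start' -> self.start(), etc.

    MiniScript().execute("start")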

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_service.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_service.py
new file mode 100644
index 0000000..41fb529
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_service.py
@@ -0,0 +1,40 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+from resource_management import *
+
+def webhcat_service(action='start'):
+  import params
+
+  cmd = format('env HADOOP_HOME={hadoop_home} {webhcat_bin_dir}/webhcat_server.sh')
+
+  if action == 'start':
+    daemon_cmd = format('{cmd} start')
+    no_op_test = format('ls {webhcat_pid_file} >/dev/null 2>&1 && ps `cat {webhcat_pid_file}` >/dev/null 2>&1')
+    Execute(daemon_cmd,
+            user=params.webhcat_user,
+            not_if=no_op_test
+    )
+  elif action == 'stop':
+    daemon_cmd = format('{cmd} stop')
+    Execute(daemon_cmd,
+            user=params.webhcat_user
+    )
+    Execute(format('rm -f {webhcat_pid_file}'))
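
The not_if guard above makes start idempotent: the daemon command is skipped whenever the pid file already points at a live process. A minimal sketch of that guard semantics with subprocess (the shell strings are illustrative):

    import subprocess

    def execute(cmd, not_if=None):
        # if the guard command succeeds (exit code 0), skip the main command
        if not_if and subprocess.call(not_if, shell=True) == 0:
            return
        subprocess.check_call(cmd, shell=True)

    execute("echo starting webhcat",
            not_if="ls /var/run/webhcat/webhcat.pid >/dev/null 2>&1")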

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_service_check.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_service_check.py
new file mode 100644
index 0000000..8d15e47
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_service_check.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+def webhcat_service_check():
+  import params
+  File(format("{tmp_dir}/templetonSmoke.sh"),
+       content= StaticFile('templetonSmoke.sh'),
+       mode=0755
+  )
+
+  cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {smokeuser_keytab}"
+               " {security_param} {kinit_path_local}",
+               smokeuser_keytab=params.smoke_user_keytab if params.security_enabled else "no_keytab")
+
+  Execute(cmd,
+          tries=3,
+          try_sleep=5,
+          path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
+          logoutput=True)
+
+
+
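
format() here interpolates {tmp_dir}, {webhcat_server_host[0]} and the rest from the calling scope, while the explicit smokeuser_keytab keyword overrides that lookup. Plain str.format shows the same shape (all values below are made up):

    cmd = ("{tmp_dir}/templetonSmoke.sh {host} {smokeuser} {smokeuser_keytab}"
           " {security_param} {kinit_path_local}").format(
        tmp_dir="/var/lib/ambari-agent/tmp",
        host="c6402.ambari.apache.org",
        smokeuser="ambari-qa",
        smokeuser_keytab="no_keytab",   # the conditional default used above
        security_param="false",
        kinit_path_local="/usr/bin/kinit")
    print(cmd)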

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/metainfo.xml
index bebc7d6..a72b7b4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/metainfo.xml
@@ -59,7 +59,7 @@
               </auto-deploy>
             </dependency>
             <dependency>
-              <name>HCATALOG/HCAT</name>
+              <name>HIVE/HCAT</name>
               <scope>host</scope>
               <auto-deploy>
                 <enabled>true</enabled>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2 b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2
index 00f0740..aee9b15 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2
@@ -54,12 +54,6 @@ define servicegroup {
   alias  OOZIE Checks
 }
 {% endif %}
-{% if hostgroup_defs['webhcat-server'] %}
-define servicegroup {
-  servicegroup_name  WEBHCAT
-  alias  WEBHCAT Checks
-}
-{% endif %}
 {% if hostgroup_defs['nagios-server'] %}
 define servicegroup {
   servicegroup_name  NAGIOS
@@ -72,7 +66,7 @@ define servicegroup {
   alias  GANGLIA Checks
 }
 {% endif %}
-{% if hostgroup_defs['hiveserver'] %}
+{% if hostgroup_defs['hiveserver'] or hostgroup_defs['webhcat-server'] %}
 define servicegroup {
   servicegroup_name  HIVE
   alias  HIVE Checks
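
This change folds the WebHCat checks into the HIVE servicegroup: the group is emitted when either a hiveserver or a webhcat-server hostgroup exists. The same conditional, exercised directly with jinja2 (the hostgroup_defs values are made up):

    from jinja2 import Template

    tpl = Template(
        "{% if hostgroup_defs['hiveserver'] or hostgroup_defs['webhcat-server'] %}"
        "define servicegroup { servicegroup_name HIVE }"
        "{% endif %}")
    print(tpl.render(hostgroup_defs={'hiveserver': False, 'webhcat-server': True}))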

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-services.cfg.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-services.cfg.j2 b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-services.cfg.j2
index 045e9ad..8e92efc 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-services.cfg.j2
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-services.cfg.j2
@@ -729,7 +729,7 @@ define service {
         hostgroup_name          webhcat-server
         use                     hadoop-service
         service_description     WEBHCAT::WebHCat Server status
-        servicegroups           WEBHCAT 
+        servicegroups           HIVE
         {% if security_enabled %}
         check_command           check_templeton_status!{{ templeton_port }}!v1!{{ str(security_enabled).lower() }}!{{ nagios_keytab_path }}!{{ nagios_principal_name }}!{{ kinit_path_local }}
         {% else %}

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/configuration/webhcat-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/configuration/webhcat-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/configuration/webhcat-env.xml
deleted file mode 100644
index 1dba691..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/configuration/webhcat-env.xml
+++ /dev/null
@@ -1,54 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration>
-  <!-- webhcat-env.sh -->
-  <property>
-    <name>content</name>
-    <description>webhcat-env.sh content</description>
-    <value>
-# The file containing the running pid
-PID_FILE={{pid_file}}
-
-TEMPLETON_LOG_DIR={{templeton_log_dir}}/
-
-
-WEBHCAT_LOG_DIR={{templeton_log_dir}}/
-
-# The console error log
-ERROR_LOG={{templeton_log_dir}}/webhcat-console-error.log
-
-# The console log
-CONSOLE_LOG={{templeton_log_dir}}/webhcat-console.log
-
-#TEMPLETON_JAR=templeton_jar_name
-
-#HADOOP_PREFIX=hadoop_prefix
-
-#HCAT_PREFIX=hive_prefix
-
-# Set HADOOP_HOME to point to a specific hadoop install directory
-export HADOOP_HOME={{hadoop_home}}
-    </value>
-  </property>
-  
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/configuration/webhcat-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/configuration/webhcat-site.xml
deleted file mode 100644
index 0523dab..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/configuration/webhcat-site.xml
+++ /dev/null
@@ -1,138 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- 
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-<!-- The default settings for Templeton. -->
-<!-- Edit templeton-site.xml to change settings for your local -->
-<!-- install. -->
-
-<configuration supports_final="true">
-
-  <property>
-    <name>templeton.port</name>
-      <value>50111</value>
-    <description>The HTTP port for the main server.</description>
-  </property>
-
-  <property>
-    <name>templeton.hadoop.conf.dir</name>
-    <value>/etc/hadoop/conf</value>
-    <description>The path to the Hadoop configuration.</description>
-  </property>
-
-  <property>
-    <name>templeton.jar</name>
-    <value>/usr/lib/hcatalog/share/webhcat/svr/webhcat.jar</value>
-    <description>The path to the Templeton jar file.</description>
-  </property>
-
-  <property>
-    <name>templeton.libjars</name>
-    <value>/usr/lib/zookeeper/zookeeper.jar</value>
-    <description>Jars to add the the classpath.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.hadoop</name>
-    <value>/usr/bin/hadoop</value>
-    <description>The path to the Hadoop executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.archive</name>
-    <value>hdfs:///apps/webhcat/pig.tar.gz</value>
-    <description>The path to the Pig archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.path</name>
-    <value>pig.tar.gz/pig/bin/pig</value>
-    <description>The path to the Pig executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hcat</name>
-    <value>/usr/bin/hcat</value>
-    <description>The path to the hcatalog executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.archive</name>
-    <value>hdfs:///apps/webhcat/hive.tar.gz</value>
-    <description>The path to the Hive archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.home</name>
-    <value>hive.tar.gz/hive</value>
-    <description>The path to the Hive home within the tar. Has no effect if templeton.hive.archive is not set.</description>
-  </property>
-
-  <property>
-    <name>templeton.hcat.home</name>
-    <value>hive.tar.gz/hive/hcatalog</value>
-    <description>The path to the HCat home within the tar. Has no effect if templeton.hive.archive is not set.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.path</name>
-    <value>hive.tar.gz/hive/bin/hive</value>
-    <description>The path to the Hive executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.properties</name>
-    <value>hive.metastore.local=false, hive.metastore.uris=thrift://localhost:9933, hive.metastore.sasl.enabled=false</value>
-    <description>Properties to set when running hive.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.zookeeper.hosts</name>
-    <value>localhost:2181</value>
-    <description>ZooKeeper servers, as comma separated host:port pairs</description>
-  </property>
-
-  <property>
-    <name>templeton.storage.class</name>
-    <value>org.apache.hive.hcatalog.templeton.tool.ZooKeeperStorage</value>
-    <description>The class to use as storage</description>
-  </property>
-
-  <property>
-   <name>templeton.override.enabled</name>
-   <value>false</value>
-   <description>
-     Enable the override path in templeton.override.jars
-   </description>
- </property>
-
- <property>
-    <name>templeton.streaming.jar</name>
-    <value>hdfs:///apps/webhcat/hadoop-streaming.jar</value>
-    <description>The hdfs path to the Hadoop streaming jar file.</description>
-  </property> 
-
-  <property>
-    <name>templeton.exec.timeout</name>
-    <value>60000</value>
-    <description>Time out for templeton api</description>
-  </property>
-
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml
deleted file mode 100644
index 5f493a6..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml
+++ /dev/null
@@ -1,110 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <displayName>WebHCat</displayName>
-      <comment>Provides a REST-like web API for HCatalog and related Hadoop components.</comment>
-      <version>0.12.0.2.0</version>
-      <components>
-        <component>
-          <name>WEBHCAT_SERVER</name>
-          <displayName>WebHCat Server</displayName>
-          <category>MASTER</category>
-          <cardinality>1</cardinality>
-          <dependencies>
-            <dependency>
-              <name>HDFS/HDFS_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>MAPREDUCE2/MAPREDUCE2_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
-              <scope>cluster</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-                <co-locate>WEBHCAT/WEBHCAT_SERVER</co-locate>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>ZOOKEEPER/ZOOKEEPER_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>YARN/YARN_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/webhcat_server.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-        </component>
-      </components>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hcatalog</name>
-            </package>
-            <package>
-              <name>webhcat-tar-hive</name>
-            </package>
-            <package>
-              <name>webhcat-tar-pig</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-      
-      <requiredServices>
-        <service>HIVE</service>
-        <service>ZOOKEEPER</service>
-      </requiredServices>
-      
-      <configuration-dependencies>
-        <config-type>webhcat-site</config-type>
-        <config-type>webhcat-env</config-type>
-      </configuration-dependencies>
-    </service>
-  </services>
-</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/files/templetonSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/files/templetonSmoke.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/files/templetonSmoke.sh
deleted file mode 100644
index 2d07b8b..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/files/templetonSmoke.sh
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/usr/bin/env bash
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-export ttonhost=$1
-export smoke_test_user=$2
-export smoke_user_keytab=$3
-export security_enabled=$4
-export kinit_path_local=$5
-export ttonurl="http://${ttonhost}:50111/templeton/v1"
-
-if [[ $security_enabled == "true" ]]; then
-  kinitcmd="${kinit_path_local}  -kt ${smoke_user_keytab} ${smoke_test_user}; "
-else
-  kinitcmd=""
-fi
-
-export no_proxy=$ttonhost
-cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>'    $ttonurl/status 2>&1"
-retVal=`su - ${smoke_test_user} -c "$cmd"`
-httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
-
-if [[ "$httpExitCode" -ne "200" ]] ; then
-  echo "Templeton Smoke Test (status cmd): Failed. : $retVal"
-  export TEMPLETON_EXIT_CODE=1
-  exit 1
-fi
-
-exit 0
-
-#try hcat ddl command
-echo "user.name=${smoke_test_user}&exec=show databases;" /tmp/show_db.post.txt
-cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>' -d  \@${destdir}/show_db.post.txt  $ttonurl/ddl 2>&1"
-retVal=`su - ${smoke_test_user} -c "$cmd"`
-httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
-
-if [[ "$httpExitCode" -ne "200" ]] ; then
-  echo "Templeton Smoke Test (ddl cmd): Failed. : $retVal"
-  export TEMPLETON_EXIT_CODE=1
-  exit  1
-fi
-
-# NOT SURE?? SUHAS
-if [[ $security_enabled == "true" ]]; then
-  echo "Templeton Pig Smoke Tests not run in secure mode"
-  exit 0
-fi
-
-#try pig query
-outname=${smoke_test_user}.`date +"%M%d%y"`.$$;
-ttonTestOutput="/tmp/idtest.${outname}.out";
-ttonTestInput="/tmp/idtest.${outname}.in";
-ttonTestScript="idtest.${outname}.pig"
-
-echo "A = load '$ttonTestInput' using PigStorage(':');"  > /tmp/$ttonTestScript
-echo "B = foreach A generate \$0 as id; " >> /tmp/$ttonTestScript
-echo "store B into '$ttonTestOutput';" >> /tmp/$ttonTestScript
-
-#copy pig script to hdfs
-su - ${smoke_test_user} -c "hadoop dfs -copyFromLocal /tmp/$ttonTestScript /tmp/$ttonTestScript"
-
-#copy input file to hdfs
-su - ${smoke_test_user} -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput"
-
-#create, copy post args file
-echo -n "user.name=${smoke_test_user}&file=/tmp/$ttonTestScript" > /tmp/pig_post.txt
-
-#submit pig query
-cmd="curl -s -w 'http_code <%{http_code}>' -d  \@${destdir}/pig_post.txt  $ttonurl/pig 2>&1"
-retVal=`su - ${smoke_test_user} -c "$cmd"`
-httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
-if [[ "$httpExitCode" -ne "200" ]] ; then
-  echo "Templeton Smoke Test (pig cmd): Failed. : $retVal"
-  export TEMPLETON_EXIT_CODE=1
-  exit 1
-fi
-
-exit 0

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/__init__.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/__init__.py
deleted file mode 100644
index 35de4bb..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/__init__.py
+++ /dev/null
@@ -1,20 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
deleted file mode 100644
index f37ac27..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
+++ /dev/null
@@ -1,102 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management import *
-import status_params
-
-# server configurations
-config = Script.get_config()
-tmp_dir = Script.get_tmp_dir()
-
-#RPM versioning support
-rpm_version = default("/configurations/hadoop-env/rpm_version", None)
-
-#hadoop params
-hdp_stack_version = config['hostLevelParams']['stack_version']
-if rpm_version is not None:
-  hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
-  hadoop_home = format('/usr/hdp/{rpm_version}/hadoop')
-  hadoop_streeming_jars = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/hadoop-streaming-*.jar")
-  if str(hdp_stack_version).startswith('2.0'):
-    config_dir = format('/usr/hdp/{rpm_version}/etc/hcatalog/conf')
-    webhcat_bin_dir = format('/usr/hdp/{rpm_version}/hive/hcatalog/sbin')
-  # for newer versions
-  else:
-    config_dir = format('/usr/hdp/{rpm_version}/etc/hive-webhcat/conf')
-    webhcat_bin_dir = format('/usr/hdp/{rpm_version}/hive/hive-hcatalog/sbin')
-else:
-  hadoop_bin_dir = "/usr/bin"
-  hadoop_home = '/usr'
-  hadoop_streeming_jars = '/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'
-  if str(hdp_stack_version).startswith('2.0'):
-    config_dir = '/etc/hcatalog/conf'
-    webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
-  # for newer versions
-  else:
-    config_dir = '/etc/hive-webhcat/conf'
-    webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
-
-hcat_user = config['configurations']['hive-env']['hcat_user']
-webhcat_user = config['configurations']['hive-env']['webhcat_user']
-
-webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
-templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
-templeton_pid_dir = status_params.templeton_pid_dir
-
-pid_file = status_params.pid_file
-
-hadoop_conf_dir = config['configurations']['webhcat-site']['templeton.hadoop.conf.dir']
-templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
-
-user_group = config['configurations']['cluster-env']['user_group']
-
-webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
-
-webhcat_apps_dir = "/apps/webhcat"
-smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-smokeuser = config['configurations']['cluster-env']['smokeuser']
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-
-hcat_hdfs_user_dir = format("/user/{hcat_user}")
-hcat_hdfs_user_mode = 0755
-webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
-webhcat_hdfs_user_mode = 0755
-webhcat_apps_dir = "/apps/webhcat"
-#for create_hdfs_directory
-hostname = config["hostname"]
-security_param = "true" if security_enabled else "false"
-hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
-  security_enabled = security_enabled,
-  keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
-)

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/service_check.py
deleted file mode 100644
index 0e3c0f0..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/service_check.py
+++ /dev/null
@@ -1,45 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-from resource_management import *
-
-class WebHCatServiceCheck(Script):
-  def service_check(self, env):
-    import params
-
-    env.set_params(params)
-
-    File(format("{tmp_dir}/templetonSmoke.sh"),
-         content= StaticFile('templetonSmoke.sh'),
-         mode=0755
-    )
-
-    cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {smokeuser_keytab}"
-                 " {security_param} {kinit_path_local}",
-                 smokeuser_keytab=params.smoke_user_keytab if params.security_enabled else "no_keytab")
-
-    Execute(cmd,
-            tries=3,
-            try_sleep=5,
-            path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
-            logoutput=True)
-
-if __name__ == "__main__":
-  WebHCatServiceCheck().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/status_params.py
deleted file mode 100644
index 23823e6..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/status_params.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-config = Script.get_config()
-
-templeton_pid_dir = config['configurations']['hive-env']['hcat_pid_dir']
-pid_file = format('{templeton_pid_dir}/webhcat.pid')


[04/27] git commit: AMBARI-7304 Python TestExecuteHadoopResource test failures (dsen)

Posted by jo...@apache.org.
AMBARI-7304 Python TestExecuteHadoopResource test failures (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/258f4541
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/258f4541
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/258f4541

Branch: refs/heads/branch-alerts-dev
Commit: 258f454191ad88c0f2ade1c40fb233e0b09b4ebf
Parents: ae0b1f3
Author: Dmytro Sen <ds...@hortonworks.com>
Authored: Mon Sep 15 14:28:33 2014 +0300
Committer: Dmytro Sen <ds...@hortonworks.com>
Committed: Mon Sep 15 14:32:00 2014 +0300

----------------------------------------------------------------------
 .../TestExecuteHadoopResource.py                | 37 ++++++++++++++++----
 1 file changed, 31 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/258f4541/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py b/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
index e357390..28453a5 100644
--- a/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
+++ b/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
@@ -15,6 +15,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 '''
+import os
 
 from unittest import TestCase
 from mock.mock import patch
@@ -38,7 +39,11 @@ class TestExecuteHadoopResource(TestCase):
       self.assertEqual(execute_mock.call_count, 1)
       self.assertEqual(execute_mock.call_args[0][0].command,'hadoop --config conf_dir command')
       self.assertEqual(execute_mock.call_args[0][0].arguments,
-                       {'logoutput': True, 'tries': 1, 'user': 'user', 'try_sleep': 0})
+                       {'logoutput': True,
+                        'tries': 1,
+                        'user': 'user',
+                        'try_sleep': 0,
+                        'environment': {'PATH': os.environ['PATH']}})
 
 
   @patch("resource_management.core.providers.system.ExecuteProvider")
@@ -58,7 +63,11 @@ class TestExecuteHadoopResource(TestCase):
       self.assertEqual(execute_mock.call_count, 1)
       self.assertEqual(execute_mock.call_args[0][0].command,'hadoop --config conf_dir command')
       self.assertEqual(execute_mock.call_args[0][0].arguments,
-                       {'logoutput': False, 'tries': 1, 'user': 'user', 'try_sleep': 0})
+                       {'logoutput': False,
+                        'tries': 1,
+                        'user': 'user',
+                        'try_sleep': 0,
+                        'environment': {'PATH': os.environ['PATH']}})
 
 
   @patch("resource_management.core.providers.system.ExecuteProvider")
@@ -83,7 +92,11 @@ class TestExecuteHadoopResource(TestCase):
       self.assertEqual(execute_mock.call_count, 1)
       self.assertEqual(execute_mock.call_args[0][0].command,'hadoop --config conf_dir command')
       self.assertEqual(execute_mock.call_args[0][0].arguments,
-                       {'logoutput': True, 'tries': 2, 'user': 'user', 'try_sleep': 2})
+                       {'logoutput': True,
+                        'tries': 2,
+                        'user': 'user',
+                        'try_sleep': 2,
+                        'environment': {'PATH': os.environ['PATH']}})
 
 
   @patch("resource_management.core.providers.system.ExecuteProvider")
@@ -105,9 +118,17 @@ class TestExecuteHadoopResource(TestCase):
       self.assertEqual(execute_mock.call_args_list[1][0][0].command,
                        'hadoop --config conf_dir command2')
       self.assertEqual(execute_mock.call_args_list[0][0][0].arguments,
-                       {'logoutput': False, 'tries': 1, 'user': 'user', 'try_sleep': 0})
+                       {'logoutput': False,
+                        'tries': 1,
+                        'user': 'user',
+                        'try_sleep': 0,
+                        'environment': {'PATH': os.environ['PATH']}})
       self.assertEqual(execute_mock.call_args_list[1][0][0].arguments,
-                       {'logoutput': False, 'tries': 1, 'user': 'user', 'try_sleep': 0})
+                       {'logoutput': False,
+                        'tries': 1,
+                        'user': 'user',
+                        'try_sleep': 0,
+                        'environment': {'PATH': os.environ['PATH']}})
 
 
   @patch("resource_management.core.providers.system.ExecuteProvider")
@@ -156,7 +177,11 @@ class TestExecuteHadoopResource(TestCase):
       self.assertEqual(execute_mock.call_args_list[1][0][0].command,
                        'hadoop --config conf_dir command')
       self.assertEqual(execute_mock.call_args_list[1][0][0].arguments,
-                       {'logoutput': True, 'tries': 1, 'user': 'user', 'try_sleep': 0})
+                       {'logoutput': True,
+                        'tries': 1,
+                        'user': 'user',
+                        'try_sleep': 0,
+                        'environment': {'PATH': os.environ['PATH']}})
 
 
   @patch("resource_management.core.providers.system.ExecuteProvider")


[26/27] git commit: AMBARI-7328 Config versions have wrong values after switching groups. (atkach)

Posted by jo...@apache.org.
AMBARI-7328 Config versions have wrong values after switching groups. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bd04a28e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bd04a28e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bd04a28e

Branch: refs/heads/branch-alerts-dev
Commit: bd04a28e14171f8d64fb6b62678a0a441f444005
Parents: 127978c
Author: atkach <at...@hortonworks.com>
Authored: Tue Sep 16 14:30:45 2014 +0300
Committer: atkach <at...@hortonworks.com>
Committed: Tue Sep 16 14:30:45 2014 +0300

----------------------------------------------------------------------
 .../controllers/main/service/info/configs.js    | 113 +++++++++++++------
 ambari-web/app/utils/ajax/ajax.js               |   9 ++
 .../views/common/configs/config_history_flow.js |   2 +-
 3 files changed, 86 insertions(+), 38 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/bd04a28e/ambari-web/app/controllers/main/service/info/configs.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/info/configs.js b/ambari-web/app/controllers/main/service/info/configs.js
index 2f79685..e10a0ba 100644
--- a/ambari-web/app/controllers/main/service/info/configs.js
+++ b/ambari-web/app/controllers/main/service/info/configs.js
@@ -48,8 +48,8 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
   preSelectedConfigVersion: null,
   // contain Service Config Property, when user proceed from Select Config Group dialog
   overrideToAdd: null,
-  //latest version of service config versions
-  currentVersion: null,
+  //version of default config group, configs of which currently applied
+  currentDefaultVersion: null,
   //version selected to view
   selectedVersion: null,
   // file names of changed configs
@@ -58,8 +58,8 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
   serviceConfigVersionNote: '',
   versionLoaded: false,
   isCurrentSelected: function () {
-    return this.get('selectedVersion') === this.get('currentVersion');
-  }.property('selectedVersion', 'currentVersion'),
+    return App.ServiceConfigVersion.find(this.get('content.serviceName') + "_" + this.get('selectedVersion')).get('isCurrent');
+  }.property('selectedVersion'),
   serviceConfigs: function () {
     return App.config.get('preDefinedServiceConfigs');
   }.property('App.config.preDefinedServiceConfigs'),
@@ -179,6 +179,16 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
   }.property('propertyFilters', 'isCompareMode'),
 
   /**
+   * indicates whether a service config version belongs to the default config group
+   * @method isVersionDefault
+   * @param version
+   * @return {Boolean}
+   */
+  isVersionDefault: function(version) {
+    return version && version.get('groupId') == -1;
+  },
+
+  /**
    * clear and set properties to default value
    */
   clearStep: function () {
@@ -290,41 +300,45 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
 
   /**
    * load service config versions to model
-   * set currentVersion
+   * set currentDefaultVersion
    * @param data
    * @param opt
    * @param params
    */
   loadServiceConfigVersionsSuccess: function (data, opt, params) {
     App.serviceConfigVersionsMapper.map(data);
+    this.set('currentDefaultVersion', data.items.filterProperty('group_id', -1).findProperty('is_current').service_config_version);
     if (this.get('preSelectedConfigVersion')) {
-      this.set('currentVersion', this.get('preSelectedConfigVersion.version'));
+      this.loadSelectedVersion(this.get('preSelectedConfigVersion.version'));
     } else {
-      this.set('currentVersion', data.items.filterProperty('group_id', -1).findProperty('is_current').service_config_version);
+      this.loadSelectedVersion();
     }
-    this.loadSelectedVersion();
   },
 
   /**
    * get selected service config version
-   * In case selected version is undefined then take currentVersion
+   * In case selected version is undefined then take currentDefaultVersion
    * @param version
    */
   loadSelectedVersion: function (version) {
     var self = this;
     this.set('versionLoaded', false);
-    var groupName = App.ServiceConfigVersion.find(this.get('content.serviceName') + "_" + version).get('groupName');
+    version = version || this.get('currentDefaultVersion');
+    var versionRecord = App.ServiceConfigVersion.find(this.get('content.serviceName') + "_" + version);
+    //a version from a non-default group requires properties from the current version of the default group to render the page correctly
+    var versions = (this.isVersionDefault(versionRecord)) ? [version] : [this.get('currentDefaultVersion'), version];
 
-    if (self.get('dataIsLoaded') && !(groupName && this.get('selectedConfigGroup.name') === groupName)) {
+    //if a version from the default group is selected, switch to the default group
+    if (self.get('dataIsLoaded') && this.isVersionDefault(versionRecord)) {
       this.set('selectedConfigGroup', this.get('configGroups').findProperty('isDefault'));
     }
 
     App.ajax.send({
-      name: 'service.serviceConfigVersion.get',
+      name: 'service.serviceConfigVersions.get.multiple',
       sender: this,
       data: {
         serviceName: this.get('content.serviceName'),
-        serviceConfigVersion: version || this.get('currentVersion')
+        serviceConfigVersions: versions
       },
       success: 'loadSelectedVersionSuccess'
     }).complete(function () {
@@ -346,18 +360,36 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
     var serviceConfigsDef = this.get('serviceConfigs').findProperty('serviceName', this.get('content.serviceName'));
     var siteToTagMap = {};
     var configTypesRendered = Object.keys(serviceConfigsDef.get('configTypesRendered'));
+    var selectedVersion = params.serviceConfigVersions.length > 1 ? params.serviceConfigVersions[1] : params.serviceConfigVersions[0];
+    var configurations = [];
+
 
     configTypesRendered.forEach(function (siteName) {
-      if (data.items[0].configurations.someProperty('type', siteName)) {
-        siteToTagMap[siteName] = data.items[0].configurations.findProperty('type', siteName).tag;
-      } else {
-        siteToTagMap[siteName] = 'version1';
-      }
+      data.items.forEach(function (item) {
+        if (item.group_id == -1) {
+          configurations = item.configurations;
+          if (item.configurations.someProperty('type', siteName)) {
+            siteToTagMap[siteName] = item.configurations.findProperty('type', siteName).tag;
+          } else {
+            siteToTagMap[siteName] = 'version1';
+          }
+        } else {
+          //set config tags of non-default config group to load overrides from selected version
+          this.loadedGroupToOverrideSiteToTagMap[item.group_name] = {};
+          item.configurations.forEach(function (config) {
+            this.loadedGroupToOverrideSiteToTagMap[item.group_name][config.type] = config.tag;
+          }, this)
+        }
+      }, this)
     }, this);
 
-    App.router.get('configurationController').saveToDB(data.items[0].configurations);
+    App.router.get('configurationController').saveToDB(configurations);
     this.loadedClusterSiteToTagMap = siteToTagMap;
-    this.set('selectedVersion', params.serviceConfigVersion);
+    this.set('selectedVersion', selectedVersion);
+    //reset the map if the current version of the default group is selected
+    if (this.get('isCurrentSelected') && selectedVersion === this.get('currentDefaultVersion')) {
+      this.loadedGroupToOverrideSiteToTagMap = {};
+    }
   },
 
   /**
@@ -376,19 +408,19 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
   },
 
   loadServiceConfigsSuccess: function (data, opt, params) {
+    if (App.get('supports.configHistory')) {
+      this.setConfigGroups(data, opt, params);
+      return;
+    }
     if (data) {
       this.setConfigGroups(data, opt, params);
     } else {
-      if (!App.get('supports.configHistory')) {
-        App.ajax.send({
-          name: 'config.tags',
-          sender: this,
-          data: App.permit(params, ['clusterName', 'serviceConfigsDef', 'serviceName']),
-          success: 'setConfigGroups'
-        });
-      }  else {
-        this.setConfigGroups(data, opt, params);
-      }
+      App.ajax.send({
+        name: 'config.tags',
+        sender: this,
+        data: App.permit(params, ['clusterName', 'serviceConfigsDef', 'serviceName']),
+        success: 'setConfigGroups'
+      });
     }
   },
 
@@ -480,15 +512,20 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
     var selectedConfigGroup = this.get('selectedConfigGroup');
     var serviceName = this.get('content.serviceName');
     //STEP 1: handle tags from JSON data for host overrides
-    this.loadedGroupToOverrideSiteToTagMap = {};
+    if (!App.supports.configHistory) {
+      //if config history is enabled, loadedGroupToOverrideSiteToTagMap was already populated in loadSelectedVersionSuccess()
+      this.loadedGroupToOverrideSiteToTagMap = {};
+    }
     var configGroupsWithOverrides = selectedConfigGroup.get('isDefault') && !this.get('isHostsConfigsPage') ? this.get('configGroups') : [selectedConfigGroup];
     configGroupsWithOverrides.forEach(function (item) {
       var groupName = item.get('name');
-      this.loadedGroupToOverrideSiteToTagMap[groupName] = {};
-      item.get('configSiteTags').forEach(function (siteTag) {
-        var site = siteTag.get('site');
-        this.loadedGroupToOverrideSiteToTagMap[groupName][site] = siteTag.get('tag');
-      }, this);
+      if (Em.isNone(this.loadedGroupToOverrideSiteToTagMap[groupName])) {
+        this.loadedGroupToOverrideSiteToTagMap[groupName] = {};
+        item.get('configSiteTags').forEach(function (siteTag) {
+          var site = siteTag.get('site');
+          this.loadedGroupToOverrideSiteToTagMap[groupName][site] = siteTag.get('tag');
+        }, this);
+      }
     }, this);
     //STEP 2: Create an array of objects defining tag names to be polled and new tag names to be set after submit
     this.setServiceConfigTags(this.loadedClusterSiteToTagMap);
@@ -515,7 +552,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
       //STEP load configs of version being compared against
       self.loadCompareVersionConfigs(self.get('allConfigs')).done(function (isComparison) {
         //STEP 9: Load and add overriden configs of group
-        if (!isComparison && self.get('isCurrentSelected')) {
+        if (!isComparison && (!self.get('selectedConfigGroup').get('isDefault') || self.get('isCurrentSelected'))) {
           App.config.loadServiceConfigGroupOverrides(self.get('allConfigs'), self.get('loadedGroupToOverrideSiteToTagMap'), self.get('configGroups'), self.onLoadOverrides, self);
         } else {
           self.onLoadOverrides(self.get('allConfigs'));
@@ -2560,6 +2597,8 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
         return;
       }
     }
+    //clean up when switching config groups
+    this.loadedGroupToOverrideSiteToTagMap = {};
     this.set('selectedConfigGroup', event.context);
   },
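
The version-loading rule introduced in loadSelectedVersion() above boils down to one decision: a version from a non-default group is always fetched together with the current version of the default group. A minimal sketch of that rule (a plain function, not the actual controller method; version numbers assumed to be plain integers):

  // Sketch: which service config versions need to be requested.
  function versionsToFetch(isDefaultGroup, selectedVersion, currentDefaultVersion) {
    // non-default groups also need the default group's current version
    // so the page can render base values alongside the overrides
    return isDefaultGroup ? [selectedVersion] : [currentDefaultVersion, selectedVersion];
  }

  console.log(versionsToFetch(true, 5, 5));  // [5]
  console.log(versionsToFetch(false, 7, 5)); // [5, 7]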
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd04a28e/ambari-web/app/utils/ajax/ajax.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/ajax/ajax.js b/ambari-web/app/utils/ajax/ajax.js
index 3d1fda9..2dceaa5 100644
--- a/ambari-web/app/utils/ajax/ajax.js
+++ b/ambari-web/app/utils/ajax/ajax.js
@@ -1906,6 +1906,15 @@ var urls = {
     real: '/clusters/{clusterName}/configurations/service_config_versions?service_name={serviceName}&service_config_version={serviceConfigVersion}',
     mock: '/data/configurations/service_version.json'
   },
+  'service.serviceConfigVersions.get.multiple': {
+    real: '/clusters/{clusterName}/configurations/service_config_versions?service_name={serviceName}&service_config_version.in({serviceConfigVersions})',
+    mock: '/data/configurations/service_version.json',
+    format: function(data) {
+      return {
+        serviceConfigVersions: data.serviceConfigVersions.join(',')
+      }
+    }
+  },
   'service.serviceConfigVersion.revert': {
     'real': '/clusters/{clusterName}',
     'mock': '',

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd04a28e/ambari-web/app/views/common/configs/config_history_flow.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/configs/config_history_flow.js b/ambari-web/app/views/common/configs/config_history_flow.js
index 1fe7d23..a9c36f4 100644
--- a/ambari-web/app/views/common/configs/config_history_flow.js
+++ b/ambari-web/app/views/common/configs/config_history_flow.js
@@ -159,7 +159,7 @@ App.ConfigHistoryFlowView = Em.View.extend({
     var serviceVersions = this.get('serviceVersions');
     var startIndex = 0;
     var currentIndex = 0;
-    var selectedVersion = this.get('controller.currentVersion');
+    var selectedVersion = this.get('controller.selectedVersion');
 
     serviceVersions.setEach('isDisplayed', false);
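
The new 'service.serviceConfigVersions.get.multiple' endpoint above relies on its format hook to turn the requested version array into the comma-separated list that the service_config_version.in(...) predicate expects. A minimal sketch of that substitution, with a hypothetical formatUrl helper standing in for App.ajax's internal {placeholder} templating:

  // Hypothetical stand-in for App.ajax's internal URL templating.
  function formatUrl(template, data) {
    return template.replace(/\{(\w+)\}/g, function (match, key) {
      return data[key];
    });
  }

  var entry = {
    real: '/clusters/{clusterName}/configurations/service_config_versions' +
          '?service_name={serviceName}&service_config_version.in({serviceConfigVersions})',
    format: function (data) {
      return { serviceConfigVersions: data.serviceConfigVersions.join(',') };
    }
  };

  var data = { clusterName: 'c1', serviceName: 'HIVE', serviceConfigVersions: [5, 7] };
  data.serviceConfigVersions = entry.format(data).serviceConfigVersions; // [5, 7] -> "5,7"
  console.log(formatUrl(entry.real, data));
  // /clusters/c1/configurations/service_config_versions?service_name=HIVE&service_config_version.in(5,7)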
 


[24/27] git commit: AMBARI-7321. Slider view: Iron out story on how app-packages are deployed and instances created (srimanth)

Posted by jo...@apache.org.
AMBARI-7321. Slider view: Iron out story on how app-packages are deployed and instances created (srimanth)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/63f5ed2c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/63f5ed2c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/63f5ed2c

Branch: refs/heads/branch-alerts-dev
Commit: 63f5ed2cc25f495cf216a350b32f45132adc0725
Parents: a412da8
Author: Srimanth Gunturi <sg...@hortonworks.com>
Authored: Mon Sep 15 20:48:07 2014 -0700
Committer: Srimanth Gunturi <sg...@hortonworks.com>
Committed: Mon Sep 15 20:48:12 2014 -0700

----------------------------------------------------------------------
 contrib/views/slider/pom.xml                                 | 8 ++++++++
 .../ambari/view/slider/SliderAppsViewControllerImpl.java     | 7 +++++++
 .../slider/src/main/resources/ui/app/models/slider_app.js    | 2 +-
 3 files changed, 16 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/63f5ed2c/contrib/views/slider/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/slider/pom.xml b/contrib/views/slider/pom.xml
index cfb0abc..82d27bb 100644
--- a/contrib/views/slider/pom.xml
+++ b/contrib/views/slider/pom.xml
@@ -549,6 +549,14 @@
 				<filtering>false</filtering>
 				<directory>target/lib</directory>
 			</resource>
+			<resource>
+				<targetPath>WEB-INF/lib</targetPath>
+				<filtering>true</filtering>
+				<directory>lib</directory>
+				<includes>
+					<include>slider-agent.tar.gz</include>
+				</includes>
+			</resource>
 		</resources>
 		<pluginManagement>
 			<plugins>

http://git-wip-us.apache.org/repos/asf/ambari/blob/63f5ed2c/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
index c4871f2..cc35dec 100644
--- a/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
+++ b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
@@ -57,6 +57,7 @@ import org.apache.slider.client.SliderClient;
 import org.apache.slider.common.SliderKeys;
 import org.apache.slider.common.params.ActionCreateArgs;
 import org.apache.slider.common.params.ActionFreezeArgs;
+import org.apache.slider.common.params.ActionInstallPackageArgs;
 import org.apache.slider.common.params.ActionThawArgs;
 import org.apache.slider.common.tools.SliderFileSystem;
 import org.apache.slider.core.exceptions.SliderException;
@@ -678,9 +679,15 @@ public class SliderAppsViewControllerImpl implements SliderAppsViewController {
       createArgs.image = new Path(hdfsLocation
           + "/user/yarn/agent/slider-agent.tar.gz");
       
+      final ActionInstallPackageArgs installArgs = new ActionInstallPackageArgs();
+      installArgs.name = appName;
+      installArgs.packageURI = getAppsFolderPath() + configs.get("application.def").getAsString();
+      installArgs.replacePkg = true;
+
       return invokeSliderClientRunnable(new SliderClientContextRunnable<String>() {
         @Override
         public String run(SliderClient sliderClient) throws YarnException, IOException, InterruptedException {
+          sliderClient.actionInstallPkg(installArgs);
           sliderClient.actionCreate(appName, createArgs);
           ApplicationId applicationId = sliderClient.applicationId;
           if (applicationId != null) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/63f5ed2c/contrib/views/slider/src/main/resources/ui/app/models/slider_app.js
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/models/slider_app.js b/contrib/views/slider/src/main/resources/ui/app/models/slider_app.js
index 3710877..7408911 100644
--- a/contrib/views/slider/src/main/resources/ui/app/models/slider_app.js
+++ b/contrib/views/slider/src/main/resources/ui/app/models/slider_app.js
@@ -114,7 +114,7 @@ App.SliderApp = DS.Model.extend({
   showMetrics: function() {
     var global = this.get('configs')['global'];
   //check whether Slider has GANGLIA configured; if not, metrics should be hidden
-    if (!(global['ganglia_server_host'] && global['ganglia_server_id'] && global['ganglia_server_port'])) {
+    if (!(global && global['ganglia_server_host'] && global['ganglia_server_id'] && global['ganglia_server_port'])) {
       return false;
     }
     return App.SliderApp.Status.running === this.get('status');
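
The added global && guard matters for apps whose configs carry no 'global' section at all: previously the property lookup threw a TypeError before the status check was ever reached. A minimal sketch with hypothetical config objects:

  // Hypothetical configs for an app that has no 'global' section.
  var configs = {};
  var globalConfig = configs['global']; // undefined (named 'global' in slider_app.js)

  // Old check: globalConfig['ganglia_server_host'] throws a TypeError here.
  // New check: short-circuits safely and simply hides the metrics.
  var hasGanglia = !!(globalConfig && globalConfig['ganglia_server_host'] &&
                      globalConfig['ganglia_server_id'] && globalConfig['ganglia_server_port']);
  console.log(hasGanglia); // false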


[10/27] git commit: AMBARI-7312. Ambari namenode UI link checks deprecated property for ssl enabled hdfs for HDP 2.1.x. Additional patch. (Denys Buzhor via akovalenko)

Posted by jo...@apache.org.
AMBARI-7312. Ambari namenode UI link checks deprecated property for ssl enabled hdfs for HDP 2.1.x. Additional patch. (Denys Buzhor via akovalenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/02e9fdbc
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/02e9fdbc
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/02e9fdbc

Branch: refs/heads/branch-alerts-dev
Commit: 02e9fdbc1941c8ce5e2a5c373f86b68ac0d6b34d
Parents: 007288a
Author: Aleksandr Kovalenko <ak...@hortonworks.com>
Authored: Mon Sep 15 19:31:07 2014 +0300
Committer: Aleksandr Kovalenko <ak...@hortonworks.com>
Committed: Mon Sep 15 19:33:51 2014 +0300

----------------------------------------------------------------------
 ambari-web/app/views/common/quick_view_link_view.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/02e9fdbc/ambari-web/app/views/common/quick_view_link_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/quick_view_link_view.js b/ambari-web/app/views/common/quick_view_link_view.js
index 0304559..f941100 100644
--- a/ambari-web/app/views/common/quick_view_link_view.js
+++ b/ambari-web/app/views/common/quick_view_link_view.js
@@ -309,7 +309,7 @@ App.QuickViewLinks = Em.View.extend({
     var hadoopSslEnabled = false;
     if (configProperties && configProperties.length > 0) {
       var site = configProperties.findProperty('type', 'core-site');
-      if (parseInt(App.get('currentStackVersionNumber')[0]) > 1) {
+      if (App.get('isHadoop2Stack')) {
         hadoopSslEnabled = (Em.get(site, 'properties') && site.properties['dfs.http.policy'] === 'HTTPS_ONLY');
       } else {
         hadoopSslEnabled = (Em.get(site, 'properties') &&  site.properties['hadoop.ssl.enabled'] == true);
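
The one-line fix above swaps ad-hoc parsing of the stack version string for the shared isHadoop2Stack flag, so the quick-links view no longer depends on the string's layout. A minimal sketch of the difference (the regex is illustrative, not the actual App code):

  var currentStackVersionNumber = '2.1.0';

  // Old check: parse the first character of the version string.
  console.log(parseInt(currentStackVersionNumber[0], 10) > 1); // true, but tied to the string layout

  // New check: one flag computed centrally and consumed everywhere.
  var isHadoop2Stack = /^2\./.test(currentStackVersionNumber); // illustrative only
  console.log(isHadoop2Stack); // true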


[15/27] AMBARI-7296. HCatalog and WebHCat services should not be managed as separate services (should be part of the Hive service) (jaimin)

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/data/HDP2/site_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2/site_properties.js b/ambari-web/app/data/HDP2/site_properties.js
index edcb322..e2cb1cf 100644
--- a/ambari-web/app/data/HDP2/site_properties.js
+++ b/ambari-web/app/data/HDP2/site_properties.js
@@ -1564,7 +1564,7 @@ module.exports =
       "name": "templeton.hive.archive",
       "displayName": "templeton.hive.archive",
       "isRequired": false,
-      "serviceName": "WEBHCAT",
+      "serviceName": "HIVE",
       "category": "Advanced webhcat-site"
     },
     {
@@ -1572,7 +1572,7 @@ module.exports =
       "name": "templeton.pig.archive",
       "displayName": "templeton.pig.archive",
       "isRequired": false,
-      "serviceName": "WEBHCAT",
+      "serviceName": "HIVE",
       "category": "Advanced webhcat-site"
     },
     {
@@ -1580,7 +1580,7 @@ module.exports =
       "name": "templeton.zookeeper.hosts",
       "displayName": "templeton.zookeeper.hosts",
       "displayType": "multiLine",
-      "serviceName": "WEBHCAT",
+      "serviceName": "HIVE",
       "category": "Advanced webhcat-site"
     },
   /**********************************************pig.properties*****************************************/
@@ -2585,7 +2585,7 @@ module.exports =
       "filename": "hive-env.xml",
       "category": "Advanced hive-env"
     },
-  /**********************************************WEBHCAT***************************************/
+  /**********************************************HIVE***************************************/
     {
       "id": "puppet var",
       "name": "webhcatserver_host",
@@ -2597,7 +2597,7 @@ module.exports =
       "isOverridable": false,
       "isVisible": true,
       "isRequiredByAgent": false,
-      "serviceName": "WEBHCAT",
+      "serviceName": "HIVE",
       "filename": "webhcat-env.xml",
       "category": "WEBHCAT_SERVER"
     },
@@ -2611,7 +2611,7 @@ module.exports =
       "displayType": "directory",
       "isOverridable": false,
       "isVisible": true,
-      "serviceName": "WEBHCAT",
+      "serviceName": "HIVE",
       "filename": "webhcat-env.xml",
       "category": "Advanced webhcat-env"
     },
@@ -2625,7 +2625,7 @@ module.exports =
       "displayType": "directory",
       "isOverridable": false,
       "isVisible": true,
-      "serviceName": "WEBHCAT",
+      "serviceName": "HIVE",
       "filename": "webhcat-env.xml",
       "category": "Advanced webhcat-env"
     },
@@ -3334,7 +3334,7 @@ module.exports =
       "filename": "core-site.xml",
       "serviceName": "MISC",
       "category": "Users and Groups",
-      "belongsToService": ["HIVE", "WEBHCAT", "OOZIE", "FALCON"],
+      "belongsToService": ["HIVE", "OOZIE", "FALCON"],
       "index": 18
     },
     {
@@ -3442,9 +3442,9 @@ module.exports =
       "isOverridable": false,
       "isVisible": true,
       "serviceName": "MISC",
-      "filename": "hcatalog-env.xml",
+      "filename": "hive-env.xml",
       "category": "Users and Groups",
-      "belongsToService": ["HCATALOG"],
+      "belongsToService": ["HIVE"],
       "index": 6
     },
     {
@@ -3460,7 +3460,7 @@ module.exports =
       "serviceName": "MISC",
       "filename": "webhcat-env.xml",
       "category": "Users and Groups",
-      "belongsToService": ["WEBHCAT"],
+      "belongsToService": ["HIVE"],
       "index": 7
     },
     {

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/data/secure_configs.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/secure_configs.js b/ambari-web/app/data/secure_configs.js
index c0b4dba..c6079c8 100644
--- a/ambari-web/app/data/secure_configs.js
+++ b/ambari-web/app/data/secure_configs.js
@@ -64,20 +64,11 @@ module.exports = [
     displayName: 'Hive',
     filename: 'hive-site',
     configCategories: [
-      App.ServiceConfigCategory.create({ name: 'Hive Metastore', displayName: 'Hive Metastore and Hive Server 2'})
-    ],
-    sites: ['hive-site'],
-    configs: configProperties.filterProperty('serviceName', 'HIVE')
-  },
-  {
-    serviceName: 'WEBHCAT',
-    displayName: 'WebHCat',
-    filename: 'webhcat-site',
-    configCategories: [
+      App.ServiceConfigCategory.create({ name: 'Hive Metastore', displayName: 'Hive Metastore and Hive Server 2'}),
       App.ServiceConfigCategory.create({ name: 'WebHCat Server', displayName : 'WebHCat Server'})
     ],
-    sites: ['webhcat-site'],
-    configs: configProperties.filterProperty('serviceName', 'WEBHCAT')
+    sites: ['hive-site','webhcat-site'],
+    configs: configProperties.filterProperty('serviceName', 'HIVE')
   },
   {
     serviceName: 'HBASE',

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/data/secure_mapping.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/secure_mapping.js b/ambari-web/app/data/secure_mapping.js
index cc6cc40..c4bd6a4 100644
--- a/ambari-web/app/data/secure_mapping.js
+++ b/ambari-web/app/data/secure_mapping.js
@@ -371,7 +371,7 @@ module.exports = [
     "foreignKey": null,
     "value": "<templateName[0]>@<templateName[1]>",
     "filename": "webhcat-site.xml",
-    "serviceName": "WEBHCAT"
+    "serviceName": "HIVE"
   },
   {
     "name": "templeton.kerberos.keytab",
@@ -379,7 +379,7 @@ module.exports = [
     "foreignKey": null,
     "value": "<templateName[0]>",
     "filename": "webhcat-site.xml",
-    "serviceName": "WEBHCAT"
+    "serviceName": "HIVE"
   },
   {
     "name": "templeton.kerberos.secret",
@@ -387,7 +387,7 @@ module.exports = [
     "foreignKey": null,
     "value": "secret",
     "filename": "webhcat-site.xml",
-    "serviceName": "WEBHCAT"
+    "serviceName": "HIVE"
   },
   {
     "name": "templeton.hive.properties",
@@ -396,7 +396,7 @@ module.exports = [
     "value": "hive.metastore.local=false,hive.metastore.uris=thrift://<templateName[0]>:9083,hive." +
       "metastore.sasl.enabled=true,hive.metastore.execute.setugi=true,hive.metastore.warehouse.dir=/apps/hive/warehouse,hive.exec.mode.local.auto=false,hive.metastore.kerberos.principal=<templateName[1]>@<templateName[2]>",
     "filename": "webhcat-site.xml",
-    "serviceName": "WEBHCAT"
+    "serviceName": "HIVE"
   },
   {
     "name": "hbase.coprocessor.master.classes",
@@ -503,7 +503,7 @@ module.exports = [
     "foreignKey": ["webHCat_http_primary_name"],
     "value": "<templateName[0]>",
     "filename": "core-site.xml",
-    "serviceName": "WEBHCAT"
+    "serviceName": "HIVE"
   },
   {
     "name": "hadoop.proxyuser.<foreignKey[0]>.hosts",
@@ -511,7 +511,7 @@ module.exports = [
     "foreignKey": ["webHCat_http_primary_name"],
     "value": "<templateName[0]>",
     "filename": "core-site.xml",
-    "serviceName": "WEBHCAT"
+    "serviceName": "HIVE"
   }
 ];
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/data/secure_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/secure_properties.js b/ambari-web/app/data/secure_properties.js
index a00783e..0fe0ca8 100644
--- a/ambari-web/app/data/secure_properties.js
+++ b/ambari-web/app/data/secure_properties.js
@@ -485,8 +485,6 @@ module.exports =
       "category": "TaskTracker",
       "component": "TASKTRACKER"
     },
-
-    //WEBHCAT
     {
       "id": "puppet var",
       "name": "webhcatserver_host",
@@ -497,7 +495,7 @@ module.exports =
       "displayType": "masterHost",
       "isVisible": true,
       "isOverridable": false,
-      "serviceName": "WEBHCAT",
+      "serviceName": "HIVE",
       "category": "WebHCat Server"
     },
     {
@@ -510,7 +508,7 @@ module.exports =
       "displayType": "principal",
       "isVisible": true,
       "isOverridable": false,
-      "serviceName": "WEBHCAT",
+      "serviceName": "HIVE",
       "category": "WebHCat Server"
     },
     {
@@ -523,7 +521,7 @@ module.exports =
       "displayType": "directory",
       "isVisible": true,
       "isOverridable": false,
-      "serviceName": "WEBHCAT",
+      "serviceName": "HIVE",
       "category": "WebHCat Server"
     },
     //HBASE

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/data/site_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/site_properties.js b/ambari-web/app/data/site_properties.js
index 02fa670..60b997e 100644
--- a/ambari-web/app/data/site_properties.js
+++ b/ambari-web/app/data/site_properties.js
@@ -764,7 +764,7 @@ module.exports =
       "name": "templeton.hive.archive",
       "displayName": "templeton.hive.archive",
       "isRequired": false,
-      "serviceName": "WEBHCAT",
+      "serviceName": "HIVE",
       "category": "Advanced webhcat-site"
     },
     {
@@ -772,7 +772,7 @@ module.exports =
       "name": "templeton.pig.archive",
       "displayName": "templeton.pig.archive",
       "isRequired": false,
-      "serviceName": "WEBHCAT",
+      "serviceName": "HIVE",
       "category": "Advanced webhcat-site"
     },
     {
@@ -781,7 +781,7 @@ module.exports =
       "displayName": "templeton.zookeeper.hosts",
       "defaultValue": "",
       "displayType": "multiLine",
-      "serviceName": "WEBHCAT",
+      "serviceName": "HIVE",
       "category": "Advanced webhcat-site"
     },
   /**********************************************pig.properties*****************************************/
@@ -1721,7 +1721,7 @@ module.exports =
       "filename": "hive-env.xml",
       "category": "Advanced hive-env"
     },
-  /**********************************************WEBHCAT***************************************/
+  /**********************************************HIVE***************************************/
     {
       "id": "puppet var",
       "name": "webhcatserver_host",
@@ -1733,7 +1733,7 @@ module.exports =
       "isOverridable": false,
       "isRequiredByAgent": false,
       "isVisible": true,
-      "serviceName": "WEBHCAT",
+      "serviceName": "HIVE",
       "filename": "webhcat-env.xml",
       "category": "WEBHCAT_SERVER"
     },
@@ -1747,7 +1747,7 @@ module.exports =
       "displayType": "directory",
       "isOverridable": false,
       "isVisible": true,
-      "serviceName": "WEBHCAT",
+      "serviceName": "HIVE",
       "filename": "webhcat-env.xml",
       "category": "Advanced webhcat-env"
     },
@@ -1761,7 +1761,7 @@ module.exports =
       "displayType": "directory",
       "isOverridable": false,
       "isVisible": true,
-      "serviceName": "WEBHCAT",
+      "serviceName": "HIVE",
       "filename": "webhcat-env.xml",
       "category": "Advanced webhcat-env"
     },
@@ -2265,7 +2265,7 @@ module.exports =
       "filename": "hadoop-env.xml",
       "serviceName": "MISC",
       "category": "Users and Groups",
-      "belongsToService": ["HIVE", "WEBHCAT", "OOZIE"]
+      "belongsToService": ["HIVE", "OOZIE"]
     },
     {
       "id": "puppet var",
@@ -2365,9 +2365,9 @@ module.exports =
       "isOverridable": false,
       "isVisible": true,
       "serviceName": "MISC",
-      "filename": "hcatalog-env.xml",
+      "filename": "hive-env.xml",
       "category": "Users and Groups",
-      "belongsToService": ["HCATALOG"]
+      "belongsToService": ["HIVE"]
     },
     {
       "id": "puppet var",
@@ -2382,7 +2382,7 @@ module.exports =
       "serviceName": "MISC",
       "filename": "webhcat-env.xml",
       "category": "Users and Groups",
-      "belongsToService": ["WEBHCAT"]
+      "belongsToService": ["HIVE"]
     },
     {
       "id": "puppet var",

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/mixins/wizard/addSecurityConfigs.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/wizard/addSecurityConfigs.js b/ambari-web/app/mixins/wizard/addSecurityConfigs.js
index 8c3d5f0..d3b527a 100644
--- a/ambari-web/app/mixins/wizard/addSecurityConfigs.js
+++ b/ambari-web/app/mixins/wizard/addSecurityConfigs.js
@@ -51,7 +51,7 @@ App.AddSecurityConfigs = Em.Mixin.create({
       configName: 'hivemetastore_host'
     },
     {
-      serviceName: 'WEBHCAT',
+      serviceName: 'HIVE',
       componentName: 'WEBHCAT_SERVER',
       configName: 'webhcat_server'
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/models/service.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/service.js b/ambari-web/app/models/service.js
index 03d2a2e..caa872e 100644
--- a/ambari-web/app/models/service.js
+++ b/ambari-web/app/models/service.js
@@ -97,18 +97,6 @@ App.Service = DS.Model.extend({
    */
   isRestartRequired: function () {
     var rhc = this.get('hostComponents').filterProperty('staleConfigs', true);
-
-    // HCatalog components are technically owned by Hive.
-    if (this.get('serviceName') == 'HIVE') {
-      var hcatService = App.Service.find('HCATALOG');
-      if (hcatService != null && hcatService.get('isLoaded')) {
-        var hcatStaleHcs = hcatService.get('hostComponents').filterProperty('staleConfigs', true);
-        if (hcatStaleHcs != null) {
-          rhc.pushObjects(hcatStaleHcs);
-        }
-      }
-    }
-
     var hc = {};
     rhc.forEach(function(_rhc) {
       var hostName = _rhc.get('hostName');

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/models/stack_service.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/stack_service.js b/ambari-web/app/models/stack_service.js
index edef8ce..55878ec 100644
--- a/ambari-web/app/models/stack_service.js
+++ b/ambari-web/app/models/stack_service.js
@@ -87,7 +87,7 @@ App.StackService = DS.Model.extend({
   }.property('coSelectedServices', 'serviceName'),
 
   isHiddenOnSelectServicePage: function () {
-    var hiddenServices = ['MAPREDUCE2', 'HCATALOG', 'WEBHCAT'];
+    var hiddenServices = ['MAPREDUCE2'];
     return hiddenServices.contains(this.get('serviceName'));
   }.property('serviceName'),
 
@@ -183,8 +183,6 @@ App.StackService.displayOrder = [
   'NAGIOS',
   'GANGLIA',
   'HIVE',
-  'HCATALOG',
-  'WEBHCAT',
   'HBASE',
   'PIG',
   'SQOOP',
@@ -198,8 +196,7 @@ App.StackService.displayOrder = [
 
 //@TODO: Write unit test for no two keys in the object should have any intersecting elements in their values
 App.StackService.coSelected = {
-  'YARN': ['MAPREDUCE2'],
-  'HIVE': ['HCATALOG', 'WEBHCAT']
+  'YARN': ['MAPREDUCE2']
 };
 
 
@@ -265,11 +262,7 @@ App.StackService.configCategories = function () {
       break;
     case 'HIVE':
       serviceConfigCategories.pushObjects([
-        App.ServiceConfigCategory.create({ name: 'HIVE_METASTORE', displayName: 'Hive Metastore'})
-      ]);
-      break;
-    case 'WEBHCAT':
-      serviceConfigCategories.pushObjects([
+        App.ServiceConfigCategory.create({ name: 'HIVE_METASTORE', displayName: 'Hive Metastore'}),
         App.ServiceConfigCategory.create({ name: 'WEBHCAT_SERVER', displayName: 'WebHCat Server'})
       ]);
       break;
@@ -323,8 +316,6 @@ App.StackService.configCategories = function () {
       break;
     case 'SQOOP':
       break;
-    case 'HCATALOG':
-      break;
     default:
       serviceConfigCategories.pushObjects([
         App.ServiceConfigCategory.create({ name: 'General', displayName: 'General'})

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/utils/batch_scheduled_requests.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/batch_scheduled_requests.js b/ambari-web/app/utils/batch_scheduled_requests.js
index 6d89ddc..1ee7c70 100644
--- a/ambari-web/app/utils/batch_scheduled_requests.js
+++ b/ambari-web/app/utils/batch_scheduled_requests.js
@@ -72,7 +72,6 @@ module.exports = {
   restartAllServiceHostComponents: function(serviceName, staleConfigsOnly, query, runMmOperation) {
     var self = this;
     var context = staleConfigsOnly ? Em.I18n.t('rollingrestart.context.allWithStaleConfigsForSelectedService').format(serviceName) : Em.I18n.t('rollingrestart.context.allForSelectedService').format(serviceName);
-    var services = (serviceName === 'HIVE' && App.Service.find('HCATALOG').get('isLoaded')) ? ['HIVE', 'HCATALOG'] : [serviceName];
 
     if (runMmOperation) {
       this.turnOnOffPassiveRequest('ON', Em.I18n.t('passiveState.turnOnFor').format(serviceName), serviceName);

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/utils/config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/config.js b/ambari-web/app/utils/config.js
index 35ca305..f49c5aa 100644
--- a/ambari-web/app/utils/config.js
+++ b/ambari-web/app/utils/config.js
@@ -119,7 +119,7 @@ App.config = Em.Object.create({
     // HCatalog should be eventually made a part of Hive Service. See AMBARI-6302 description for further details
     var servicesWithConfigTypes = stackServices.filter(function (service) {
       var configtypes = service.get('configTypes');
-      return configtypes && !!Object.keys(configtypes).length && service.get('serviceName') != 'HCATALOG';
+      return configtypes && !!Object.keys(configtypes).length;
     }, this);
 
     var allTabs = servicesWithConfigTypes.concat(nonServiceTab);

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/utils/helper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/helper.js b/ambari-web/app/utils/helper.js
index 3be9849..a3ebfde 100644
--- a/ambari-web/app/utils/helper.js
+++ b/ambari-web/app/utils/helper.js
@@ -326,7 +326,6 @@ App.format = {
     'HBASE': 'HBase',
     'HBASE_REGIONSERVER': 'RegionServer',
     'HCAT': 'HCat',
-    'HCATALOG': 'HCatalog',
     'HDFS': 'HDFS',
     'HISTORYSERVER': 'History Server',
     'HIVE_SERVER': 'HiveServer2',

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/views/main/host/configs_service_menu.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/host/configs_service_menu.js b/ambari-web/app/views/main/host/configs_service_menu.js
index 0f20a7d..4724573 100644
--- a/ambari-web/app/views/main/host/configs_service_menu.js
+++ b/ambari-web/app/views/main/host/configs_service_menu.js
@@ -29,7 +29,7 @@ App.MainHostServiceMenuView = Em.CollectionView.extend({
         var service = hc.get('service');
         if (service) {
           var serviceName = service.get('serviceName');
-          if(!App.get('services.noConfigTypes').concat('HCATALOG').contains(serviceName)){
+          if(!App.get('services.noConfigTypes').contains(serviceName)){
             if (!services.findProperty('serviceName', serviceName)) {
               services.push(service);
             }

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/views/main/service/info/summary.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/info/summary.js b/ambari-web/app/views/main/service/info/summary.js
index 8d5ed4e..d90f835 100644
--- a/ambari-web/app/views/main/service/info/summary.js
+++ b/ambari-web/app/views/main/service/info/summary.js
@@ -88,7 +88,7 @@ App.MainServiceInfoSummaryView = Em.View.extend({
   noTemplateService: function () {
     var serviceName = this.get("service.serviceName");
     //services with only master components
-    return serviceName == "WEBHCAT" || serviceName == "NAGIOS";
+    return serviceName == "NAGIOS";
   }.property('controller.content'),
 
   hasManyServers: function () {

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/views/main/service/item.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/item.js b/ambari-web/app/views/main/service/item.js
index f99e69a..2dda3c0 100644
--- a/ambari-web/app/views/main/service/item.js
+++ b/ambari-web/app/views/main/service/item.js
@@ -215,7 +215,7 @@ App.MainServiceItemView = Em.View.extend({
   }.property('maintenance'),
 
   hasConfigTab: function() {
-    return !App.get('services.noConfigTypes').concat('HCATALOG').contains(this.get('controller.content.serviceName'));
+    return !App.get('services.noConfigTypes').contains(this.get('controller.content.serviceName'));
   }.property('controller.content.serviceName','App.services.noConfigTypes'),
 
   didInsertElement: function () {

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/app/views/main/service/menu.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/menu.js b/ambari-web/app/views/main/service/menu.js
index c329921..507a652 100644
--- a/ambari-web/app/views/main/service/menu.js
+++ b/ambari-web/app/views/main/service/menu.js
@@ -20,7 +20,7 @@ var App = require('app');
 var misc = require('utils/misc');
 
 App.MainServiceMenuView = Em.CollectionView.extend({
-  disabledServices: ['HCATALOG'],
+  disabledServices: [],
 
   content:function () {
     var items = App.router.get('mainServiceController.content').filter(function(item){
@@ -75,7 +75,7 @@ App.MainServiceMenuView = Em.CollectionView.extend({
     }.property('content.criticalAlertsCount'),
 
     isConfigurable: function () {
-      return !App.get('services.noConfigTypes').concat('HCATALOG').contains(this.get('content.serviceName'));
+      return !App.get('services.noConfigTypes').contains(this.get('content.serviceName'));
     }.property('App.services.noConfigTypes','content.serviceName'),
 
     link: function() {
@@ -120,7 +120,7 @@ App.MainServiceMenuView = Em.CollectionView.extend({
 });
 
 App.TopNavServiceMenuView = Em.CollectionView.extend({
-  disabledServices: ['HCATALOG'],
+  disabledServices: [],
 
   content:function () {
     var items = App.router.get('mainServiceController.content').filter(function(item){
@@ -173,7 +173,7 @@ App.TopNavServiceMenuView = Em.CollectionView.extend({
     }.property('content.criticalAlertsCount'),
 
     isConfigurable: function () {
-      return !App.get('services.noConfigTypes').concat('HCATALOG').contains(this.get('content.serviceName'));
+      return !App.get('services.noConfigTypes').contains(this.get('content.serviceName'));
     }.property('App.services.noConfigTypes','content.serviceName'),
 
     link: function() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/test/controllers/main/admin/security/add/addSecurity_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/admin/security/add/addSecurity_controller_test.js b/ambari-web/test/controllers/main/admin/security/add/addSecurity_controller_test.js
index 9278148..cd4f4a2 100644
--- a/ambari-web/test/controllers/main/admin/security/add/addSecurity_controller_test.js
+++ b/ambari-web/test/controllers/main/admin/security/add/addSecurity_controller_test.js
@@ -63,7 +63,6 @@ describe('App.AddSecurityController', function () {
         "HDFS",
         "MAPREDUCE",
         "HIVE",
-        "WEBHCAT",
         "HBASE",
         "ZOOKEEPER",
         "OOZIE",
@@ -82,7 +81,6 @@ describe('App.AddSecurityController', function () {
         "MAPREDUCE2",
         "YARN",
         "HIVE",
-        "WEBHCAT",
         "HBASE",
         "ZOOKEEPER",
         "OOZIE",

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/test/controllers/main/service/info/config_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/service/info/config_test.js b/ambari-web/test/controllers/main/service/info/config_test.js
index 56f0aeb..afb2a08 100644
--- a/ambari-web/test/controllers/main/service/info/config_test.js
+++ b/ambari-web/test/controllers/main/service/info/config_test.js
@@ -717,18 +717,18 @@ describe("App.MainServiceInfoConfigsController", function () {
     var tests = [
       {
         stepConfigs: [Em.Object.create({
-          serviceName: "WEBHCAT",
+          serviceName: "HIVE",
           configs: []
         })],
         content: Em.Object.create({
-          serviceName: "WEBHCAT"
+          serviceName: "HIVE"
         }),
         m: "add dynamic property",
         addDynamic: true
       },
       {
         stepConfigs: [Em.Object.create({
-          serviceName: "WEBHCAT",
+          serviceName: "HIVE",
           configs: [
             Em.Object.create({
               name: "templeton.hive.properties"
@@ -736,7 +736,7 @@ describe("App.MainServiceInfoConfigsController", function () {
           ]
         })],
         content: Em.Object.create({
-          serviceName: "WEBHCAT"
+          serviceName: "HIVE"
         }),
         m: "don't add dynamic property (already included)",
         addDynamic: false

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/test/controllers/wizard/step4_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/wizard/step4_test.js b/ambari-web/test/controllers/wizard/step4_test.js
index 7344853..a8ec9b9 100644
--- a/ambari-web/test/controllers/wizard/step4_test.js
+++ b/ambari-web/test/controllers/wizard/step4_test.js
@@ -24,8 +24,8 @@ require('controllers/wizard/step4_controller');
 describe('App.WizardStep4Controller', function () {
 
   var services = [
-    'HDFS', 'MAPREDUCE', 'NAGIOS', 'GANGLIA', 'OOZIE', 'HIVE', 'HBASE', 'PIG', 'SCOOP', 'ZOOKEEPER', 'HCATALOG',
-    'WEBHCAT', 'YARN', 'MAPREDUCE2', 'FALCON', 'TEZ', 'STORM'
+    'HDFS', 'MAPREDUCE', 'NAGIOS', 'GANGLIA', 'OOZIE', 'HIVE', 'HBASE', 'PIG', 'SCOOP', 'ZOOKEEPER',
+    'YARN', 'MAPREDUCE2', 'FALCON', 'TEZ', 'STORM'
   ];
 
   var controller = App.WizardStep4Controller.create();
@@ -135,31 +135,29 @@ describe('App.WizardStep4Controller', function () {
   describe('#setGroupedServices()', function () {
     var testCases = [
       {
-        title: 'should set HCATALOG and WEBHCAT isSelected to true when HIVE is selected',
+        title: 'should set MapReduce2 isSelected to true when YARN is selected',
         condition: {
+          'YARN': true,
           'HBASE': true,
           'ZOOKEEPER': true,
           'HIVE': true,
-          'HCATALOG': true,
-          'WEBHCAT': true
+          'MAPREDUCE2': true
         },
         result: {
-          'HCATALOG': true,
-          'WEBHCAT': true
+          'MAPREDUCE2': true
         }
       },
       {
-        title: 'should set HCATALOG and WEBHCAT isSelected to false when HIVE is not selected',
+        title: 'should set MapReduce2 isSelected to false when YARN is not selected',
         condition: {
+          'YARN': false,
           'HBASE': true,
           'ZOOKEEPER': true,
           'HIVE': false,
-          'HCATALOG': true,
-          'WEBHCAT': true
+          'MAPREDUCE2': true
         },
         result: {
-          'HCATALOG': false,
-          'WEBHCAT': false
+          'MAPREDUCE2': false
         }
       },
       {
@@ -168,15 +166,11 @@ describe('App.WizardStep4Controller', function () {
           'HBASE': true,
           'ZOOKEEPER': true,
           'HIVE': false,
-          'HCATALOG': true,
-          'WEBHCAT': true,
           'YARN': true,
           'MAPREDUCE2': true
         },
         result: {
-          'MAPREDUCE2': true,
-          'HCATALOG': false,
-          'WEBHCAT': false
+          'MAPREDUCE2': true
         }
       },
       {
@@ -185,15 +179,11 @@ describe('App.WizardStep4Controller', function () {
           'HBASE': true,
           'ZOOKEEPER': true,
           'HIVE': true,
-          'HCATALOG': true,
-          'WEBHCAT': true,
           'YARN': false,
           'MAPREDUCE2': true
         },
         result: {
-          'MAPREDUCE2': false,
-          'HCATALOG': true,
-          'WEBHCAT': true
+          'MAPREDUCE2': false
         }
       }
     ];
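
With HCATALOG and WEBHCAT no longer standalone services, the only grouped pair
left in these cases is YARN/MAPREDUCE2: the dependent service's isSelected flag
must always follow its group leader. A minimal Java sketch of that propagation
rule (the map-based model is illustrative, not the Ember controller API):

    import java.util.HashMap;
    import java.util.Map;

    // Sketch of the grouped-services rule the test cases above encode:
    // a dependent service's selection always follows its group leader.
    public class GroupedServicesSketch {

        // leader -> dependent (assumed shape; the real controller reads stack metadata)
        static final Map<String, String> GROUPS = Map.of("YARN", "MAPREDUCE2");

        static Map<String, Boolean> setGroupedServices(Map<String, Boolean> selected) {
            Map<String, Boolean> result = new HashMap<>(selected);
            GROUPS.forEach((leader, dependent) ->
                result.put(dependent, result.getOrDefault(leader, false)));
            return result;
        }

        public static void main(String[] args) {
            System.out.println(setGroupedServices(Map.of("YARN", true)));  // MAPREDUCE2=true
            System.out.println(setGroupedServices(Map.of("YARN", false))); // MAPREDUCE2=false
        }
    }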

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/test/controllers/wizard/step8_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/wizard/step8_test.js b/ambari-web/test/controllers/wizard/step8_test.js
index b994ebb..b6488a9 100644
--- a/ambari-web/test/controllers/wizard/step8_test.js
+++ b/ambari-web/test/controllers/wizard/step8_test.js
@@ -101,13 +101,13 @@ describe('App.WizardStep8Controller', function () {
       {selectedServices: Em.A(['MAPREDUCE2', 'YARN', 'HBASE']), e: 7},
       {selectedServices: Em.A(['MAPREDUCE2', 'YARN', 'HBASE', 'OOZIE']), e: 9},
       {selectedServices: Em.A(['MAPREDUCE2', 'YARN', 'HBASE', 'OOZIE', 'HIVE']), e: 12},
-      {selectedServices: Em.A(['MAPREDUCE2', 'YARN', 'HBASE', 'OOZIE', 'HIVE', 'WEBHCAT']), e: 13},
-      {selectedServices: Em.A(['MAPREDUCE2', 'YARN', 'HBASE', 'OOZIE', 'HIVE', 'WEBHCAT', 'HUE']), e: 14},
-      {selectedServices: Em.A(['MAPREDUCE2', 'YARN', 'HBASE', 'OOZIE', 'HIVE', 'WEBHCAT', 'HUE', 'PIG']), e: 15},
-      {selectedServices: Em.A(['MAPREDUCE2', 'YARN', 'HBASE', 'OOZIE', 'HIVE', 'WEBHCAT', 'HUE', 'PIG', 'FALCON']), e: 17},
-      {selectedServices: Em.A(['MAPREDUCE2', 'YARN', 'HBASE', 'OOZIE', 'HIVE', 'WEBHCAT', 'HUE', 'PIG', 'FALCON', 'STORM']), e: 18},
-      {selectedServices: Em.A(['MAPREDUCE2', 'YARN', 'HBASE', 'OOZIE', 'HIVE', 'WEBHCAT', 'HUE', 'PIG', 'FALCON', 'STORM', 'TEZ']), e: 19},
-      {selectedServices: Em.A(['MAPREDUCE2', 'YARN', 'HBASE', 'OOZIE', 'HIVE', 'WEBHCAT', 'HUE', 'PIG', 'FALCON', 'STORM', 'TEZ', 'ZOOKEEPER']), e: 21}
+      {selectedServices: Em.A(['MAPREDUCE2', 'YARN', 'HBASE', 'OOZIE', 'HIVE']), e: 13},
+      {selectedServices: Em.A(['MAPREDUCE2', 'YARN', 'HBASE', 'OOZIE', 'HIVE', 'HUE']), e: 14},
+      {selectedServices: Em.A(['MAPREDUCE2', 'YARN', 'HBASE', 'OOZIE', 'HIVE', 'HUE', 'PIG']), e: 15},
+      {selectedServices: Em.A(['MAPREDUCE2', 'YARN', 'HBASE', 'OOZIE', 'HIVE', 'HUE', 'PIG', 'FALCON']), e: 17},
+      {selectedServices: Em.A(['MAPREDUCE2', 'YARN', 'HBASE', 'OOZIE', 'HIVE', 'HUE', 'PIG', 'FALCON', 'STORM']), e: 18},
+      {selectedServices: Em.A(['MAPREDUCE2', 'YARN', 'HBASE', 'OOZIE', 'HIVE', 'HUE', 'PIG', 'FALCON', 'STORM', 'TEZ']), e: 19},
+      {selectedServices: Em.A(['MAPREDUCE2', 'YARN', 'HBASE', 'OOZIE', 'HIVE', 'HUE', 'PIG', 'FALCON', 'STORM', 'TEZ', 'ZOOKEEPER']), e: 21}
     ]);
 
     tests.forEach(function (test) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/test/models/service_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/models/service_test.js b/ambari-web/test/models/service_test.js
index 2467932..a289825 100644
--- a/ambari-web/test/models/service_test.js
+++ b/ambari-web/test/models/service_test.js
@@ -92,14 +92,6 @@ var service,
       configurable: true
     },
     {
-      name: 'HCATALOG',
-      clientOnly: true
-    },
-    {
-      name: 'WEBHCAT',
-      configurable: true
-    },
-    {
       name: 'FLUME',
       configurable: true
     },

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/test/models/stack_service_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/models/stack_service_test.js b/ambari-web/test/models/stack_service_test.js
index ef3e587..3500343 100644
--- a/ambari-web/test/models/stack_service_test.js
+++ b/ambari-web/test/models/stack_service_test.js
@@ -114,14 +114,6 @@ describe('App.StackService', function () {
       {
         serviceName: 'MAPREDUCE2',
         result: true
-      },
-      {
-        serviceName: 'HCATALOG',
-        result: true
-      },
-      {
-        serviceName: 'WEBHCAT',
-        result: true
       }
     ];
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/test/service_components.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/service_components.js b/ambari-web/test/service_components.js
index 091048c..27e198c 100644
--- a/ambari-web/test/service_components.js
+++ b/ambari-web/test/service_components.js
@@ -404,53 +404,6 @@ module.exports = {
       ]
     },
     {
-      "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/HCATALOG",
-      "StackServices" : {
-        "comments" : "This is comment for HCATALOG service",
-        "custom_commands" : [ ],
-        "service_check_supported" : true,
-        "service_name" : "HCATALOG",
-        "display_name" : "HCatalog",
-        "service_version" : "0.12.0.2.1",
-        "stack_name" : "HDP",
-        "stack_version" : "2.1",
-        "user_name" : null,
-        "required_services" : [
-          "HIVE"
-        ],
-        "config_types" : {
-          "hive-env" : {
-            "supports" : {
-              "final" : "false"
-            }
-          },
-          "hive-site" : {
-            "supports" : {
-              "final" : "true"
-            }
-          }
-        }
-      },
-      "serviceComponents" : [
-        {
-          "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/HCATALOG/serviceComponents/HCAT",
-          "StackServiceComponents" : {
-            "cardinality" : null,
-            "component_category" : "CLIENT",
-            "component_name" : "HCAT",
-            "display_name" : "HCat",
-            "custom_commands" : [ ],
-            "is_client" : true,
-            "is_master" : false,
-            "service_name" : "HCATALOG",
-            "stack_name" : "HDP",
-            "stack_version" : "2.1"
-          },
-          "dependencies" : [ ]
-        }
-      ]
-    },
-    {
       "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/HDFS",
       "StackServices" : {
         "comments" : "Apache Hadoop Distributed File System",
@@ -733,6 +686,73 @@ module.exports = {
           ]
         },
         {
+          "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/WEBHCAT/serviceComponents/WEBHCAT_SERVER",
+          "StackServiceComponents" : {
+            "cardinality" : "1",
+            "component_category" : "MASTER",
+            "component_name" : "WEBHCAT_SERVER",
+            "display_name" : "WebHCat Server",
+            "custom_commands" : [ ],
+            "is_client" : false,
+            "is_master" : true,
+            "service_name" : "HIVE",
+            "stack_name" : "HDP",
+            "stack_version" : "2.1"
+          },
+          "dependencies" : [
+            {
+              "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/HIVE/serviceComponents/WEBHCAT_SERVER/dependencies/HDFS_CLIENT",
+              "Dependencies" : {
+                "component_name" : "HDFS_CLIENT",
+                "dependent_component_name" : "WEBHCAT_SERVER",
+                "dependent_service_name" : "WEBHCAT",
+                "stack_name" : "HDP",
+                "stack_version" : "2.1"
+              }
+            },
+            {
+              "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/HIVE/serviceComponents/WEBHCAT_SERVER/dependencies/MAPREDUCE2_CLIENT",
+              "Dependencies" : {
+                "component_name" : "MAPREDUCE2_CLIENT",
+                "dependent_component_name" : "WEBHCAT_SERVER",
+                "dependent_service_name" : "WEBHCAT",
+                "stack_name" : "HDP",
+                "stack_version" : "2.1"
+              }
+            },
+            {
+              "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/WEBHCAT/serviceComponents/WEBHCAT_SERVER/dependencies/YARN_CLIENT",
+              "Dependencies" : {
+                "component_name" : "YARN_CLIENT",
+                "dependent_component_name" : "WEBHCAT_SERVER",
+                "dependent_service_name" : "WEBHCAT",
+                "stack_name" : "HDP",
+                "stack_version" : "2.1"
+              }
+            },
+            {
+              "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/WEBHCAT/serviceComponents/WEBHCAT_SERVER/dependencies/ZOOKEEPER_CLIENT",
+              "Dependencies" : {
+                "component_name" : "ZOOKEEPER_CLIENT",
+                "dependent_component_name" : "WEBHCAT_SERVER",
+                "dependent_service_name" : "WEBHCAT",
+                "stack_name" : "HDP",
+                "stack_version" : "2.1"
+              }
+            },
+            {
+              "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/WEBHCAT/serviceComponents/WEBHCAT_SERVER/dependencies/ZOOKEEPER_SERVER",
+              "Dependencies" : {
+                "component_name" : "ZOOKEEPER_SERVER",
+                "dependent_component_name" : "WEBHCAT_SERVER",
+                "dependent_service_name" : "WEBHCAT",
+                "stack_name" : "HDP",
+                "stack_version" : "2.1"
+              }
+            }
+          ]
+        },
+        {
           "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/HIVE/serviceComponents/MYSQL_SERVER",
           "StackServiceComponents" : {
             "cardinality" : "0-1",
@@ -747,6 +767,22 @@ module.exports = {
             "stack_version" : "2.1"
           },
           "dependencies" : [ ]
+        },
+        {
+          "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/HIVE/serviceComponents/HCAT",
+          "StackServiceComponents" : {
+            "cardinality" : null,
+            "component_category" : "CLIENT",
+            "component_name" : "HCAT",
+            "display_name" : "HCat",
+            "custom_commands" : [ ],
+            "is_client" : true,
+            "is_master" : false,
+            "service_name" : "HIVE",
+            "stack_name" : "HDP",
+            "stack_version" : "2.1"
+          },
+          "dependencies" : [ ]
         }
       ]
     },
@@ -1349,105 +1385,6 @@ module.exports = {
       ]
     },
     {
-      "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/WEBHCAT",
-      "StackServices" : {
-        "comments" : "This is comment for WEBHCAT service",
-        "custom_commands" : [ ],
-        "service_check_supported" : true,
-        "service_name" : "WEBHCAT",
-        "display_name" : "WebHCat",
-        "service_version" : "0.13.0.2.1",
-        "stack_name" : "HDP",
-        "stack_version" : "2.1",
-        "user_name" : null,
-        "required_services" : [
-          "HIVE",
-          "ZOOKEEPER"
-        ],
-        "config_types" : {
-          "webhcat-env" : {
-            "supports" : {
-              "final" : "false"
-            }
-          },
-          "webhcat-site" : {
-            "supports" : {
-              "final" : "true"
-            }
-          }
-        }
-      },
-      "serviceComponents" : [
-        {
-          "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/WEBHCAT/serviceComponents/WEBHCAT_SERVER",
-          "StackServiceComponents" : {
-            "cardinality" : "1",
-            "component_category" : "MASTER",
-            "component_name" : "WEBHCAT_SERVER",
-            "display_name" : "WebHCat Server",
-            "custom_commands" : [ ],
-            "is_client" : false,
-            "is_master" : true,
-            "service_name" : "WEBHCAT",
-            "stack_name" : "HDP",
-            "stack_version" : "2.1"
-          },
-          "dependencies" : [
-            {
-              "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/WEBHCAT/serviceComponents/WEBHCAT_SERVER/dependencies/HDFS_CLIENT",
-              "Dependencies" : {
-                "component_name" : "HDFS_CLIENT",
-                "dependent_component_name" : "WEBHCAT_SERVER",
-                "dependent_service_name" : "WEBHCAT",
-                "stack_name" : "HDP",
-                "stack_version" : "2.1"
-              }
-            },
-            {
-              "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/WEBHCAT/serviceComponents/WEBHCAT_SERVER/dependencies/MAPREDUCE2_CLIENT",
-              "Dependencies" : {
-                "component_name" : "MAPREDUCE2_CLIENT",
-                "dependent_component_name" : "WEBHCAT_SERVER",
-                "dependent_service_name" : "WEBHCAT",
-                "stack_name" : "HDP",
-                "stack_version" : "2.1"
-              }
-            },
-            {
-              "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/WEBHCAT/serviceComponents/WEBHCAT_SERVER/dependencies/YARN_CLIENT",
-              "Dependencies" : {
-                "component_name" : "YARN_CLIENT",
-                "dependent_component_name" : "WEBHCAT_SERVER",
-                "dependent_service_name" : "WEBHCAT",
-                "stack_name" : "HDP",
-                "stack_version" : "2.1"
-              }
-            },
-            {
-              "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/WEBHCAT/serviceComponents/WEBHCAT_SERVER/dependencies/ZOOKEEPER_CLIENT",
-              "Dependencies" : {
-                "component_name" : "ZOOKEEPER_CLIENT",
-                "dependent_component_name" : "WEBHCAT_SERVER",
-                "dependent_service_name" : "WEBHCAT",
-                "stack_name" : "HDP",
-                "stack_version" : "2.1"
-              }
-            },
-            {
-              "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/WEBHCAT/serviceComponents/WEBHCAT_SERVER/dependencies/ZOOKEEPER_SERVER",
-              "Dependencies" : {
-                "component_name" : "ZOOKEEPER_SERVER",
-                "dependent_component_name" : "WEBHCAT_SERVER",
-                "dependent_service_name" : "WEBHCAT",
-                "stack_name" : "HDP",
-                "stack_version" : "2.1"
-              }
-            }
-          ]
-        }
-      ]
-    },
-    {
       "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/YARN",
       "StackServices" : {
         "comments" : "Apache Hadoop NextGen MapReduce (YARN)",

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-web/test/utils/helper_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/utils/helper_test.js b/ambari-web/test/utils/helper_test.js
index 4d2c3b2..bd90d09 100644
--- a/ambari-web/test/utils/helper_test.js
+++ b/ambari-web/test/utils/helper_test.js
@@ -229,7 +229,6 @@ describe('utils/helper', function() {
           'HBASE_REGIONSERVER': 'RegionServer',
           'HBASE_SERVICE_CHECK': 'HBase Service Check',
           'HCAT': 'HCat',
-          'HCATALOG': 'HCatalog',
           'HCAT_SERVICE_CHECK': 'HCat Service Check',
           'HDFS': 'HDFS',
           'HDFS_CLIENT': 'HDFS Client',

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/contrib/addons/src/addOns/nagios/scripts/nagios_alerts.php
----------------------------------------------------------------------
diff --git a/contrib/addons/src/addOns/nagios/scripts/nagios_alerts.php b/contrib/addons/src/addOns/nagios/scripts/nagios_alerts.php
index d15b023..067b7ff 100644
--- a/contrib/addons/src/addOns/nagios/scripts/nagios_alerts.php
+++ b/contrib/addons/src/addOns/nagios/scripts/nagios_alerts.php
@@ -197,9 +197,9 @@ function hdp_mon_generate_response( $response_data )
         continue;
       }
       if (getParameter($object, "service_description") == WEBHCAT_SERVICE_CHECK) {
-        $services_object["WEBHCAT"] = getParameter($object, "last_hard_state");
-        if ($services_object["WEBHCAT"] >= 1) {
-          $services_object["WEBHCAT"] = 1;
+          $services_object["HIVE"] = getParameter($object, "last_hard_state");
+        if ($services_object["HIVE"] >= 1) {
+            $services_object["HIVE"] = 1;
         }
         continue;
       }
@@ -381,6 +381,7 @@ function hdp_mon_generate_response( $response_data )
         break;
       case "HIVE-METASTORE":
       case "HIVE-SERVER":
+      case "WEBHCAT":
         $pieces[0] = "HIVE";
         break;
       case "ZKSERVERS":
@@ -413,7 +414,6 @@ function hdp_mon_generate_response( $response_data )
       case "HBASE":
       case "ZOOKEEPER":
       case "OOZIE":
-      case "WEBHCAT":
       case "GANGLIA":
       case "STORM":
       case "FALCON":

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/msi/ClusterDefinition.java
----------------------------------------------------------------------
diff --git a/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/msi/ClusterDefinition.java b/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/msi/ClusterDefinition.java
index c52a27e..a6528b2 100644
--- a/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/msi/ClusterDefinition.java
+++ b/contrib/ambari-scom/ambari-scom-server/src/main/java/org/apache/ambari/msi/ClusterDefinition.java
@@ -167,7 +167,7 @@ public class ClusterDefinition {
     componentServiceMap.put("HIVE_METASTORE",     "HIVE");
     componentServiceMap.put("HIVE_CLIENT",        "HIVE");
     componentServiceMap.put("OOZIE_SERVER",       "OOZIE");
-    componentServiceMap.put("WEBHCAT_SERVER",     "WEBHCAT");
+    componentServiceMap.put("WEBHCAT_SERVER",     "HIVE");
     componentServiceMap.put("FLUME_SERVER",       "FLUME");
     componentServiceMap.put("HBASE_MASTER",       "HBASE");
     componentServiceMap.put("HBASE_REGIONSERVER", "HBASE");

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/contrib/ambari-scom/ambari-scom-server/src/test/java/org/apache/ambari/msi/ClusterDefinitionTest.java
----------------------------------------------------------------------
diff --git a/contrib/ambari-scom/ambari-scom-server/src/test/java/org/apache/ambari/msi/ClusterDefinitionTest.java b/contrib/ambari-scom/ambari-scom-server/src/test/java/org/apache/ambari/msi/ClusterDefinitionTest.java
index 0db728e..902b812 100644
--- a/contrib/ambari-scom/ambari-scom-server/src/test/java/org/apache/ambari/msi/ClusterDefinitionTest.java
+++ b/contrib/ambari-scom/ambari-scom-server/src/test/java/org/apache/ambari/msi/ClusterDefinitionTest.java
@@ -45,7 +45,6 @@ public class ClusterDefinitionTest {
     Assert.assertTrue(services.contains("HBASE"));
     Assert.assertTrue(services.contains("ZOOKEEPER"));
     Assert.assertTrue(services.contains("HIVE"));
-    Assert.assertTrue(services.contains("WEBHCAT"));
   }
 
   @Test
@@ -88,9 +87,6 @@ public class ClusterDefinitionTest {
     components = clusterDefinition.getComponents("OOZIE");
     Assert.assertTrue(components.contains("OOZIE_SERVER"));
 
-    components = clusterDefinition.getComponents("WEBHCAT");
-    Assert.assertTrue(components.contains("WEBHCAT_SERVER"));
-
     components = clusterDefinition.getComponents("HBASE");
     Assert.assertTrue(components.contains("HBASE_MASTER"));
     Assert.assertTrue(components.contains("HBASE_REGIONSERVER"));

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/contrib/ambari-scom/management-pack/Hadoop_MP/HadoopMp/Modules/DataSources/Discovery/Scripts/HostComponentsDiscovery.ps1
----------------------------------------------------------------------
diff --git a/contrib/ambari-scom/management-pack/Hadoop_MP/HadoopMp/Modules/DataSources/Discovery/Scripts/HostComponentsDiscovery.ps1 b/contrib/ambari-scom/management-pack/Hadoop_MP/HadoopMp/Modules/DataSources/Discovery/Scripts/HostComponentsDiscovery.ps1
index 09a747f..614c463 100644
--- a/contrib/ambari-scom/management-pack/Hadoop_MP/HadoopMp/Modules/DataSources/Discovery/Scripts/HostComponentsDiscovery.ps1
+++ b/contrib/ambari-scom/management-pack/Hadoop_MP/HadoopMp/Modules/DataSources/Discovery/Scripts/HostComponentsDiscovery.ps1
@@ -91,9 +91,8 @@ function GetParentServiceName($componentName) {
     switch ($componentName) {
         { 'namenode', 'secondary_namenode', 'datanode', 'zkfc', 'journalnode'  -contains $_ } { 'HDFS' }
         { 'jobtracker', 'tasktracker' -contains $_ } { 'MAPREDUCE' }
-        { 'hive_server', 'hive_metastore', 'hive_client' -contains $_ } { 'HIVE' }
+        { 'hive_server', 'hive_metastore', 'webhcat_server', 'hive_client' -contains $_ } { 'HIVE' }
         'templeton' { 'TEMPLETON' }
-        'webhcat_server' { 'WEBHCAT' }
         'oozie_server' { 'OOZIE' }
         'pig' { 'PIG' }
         'sqoop' { 'SQOOP' }

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/contrib/views/slider/src/main/resources/ui/app/assets/data/resource/service_status.json
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/assets/data/resource/service_status.json b/contrib/views/slider/src/main/resources/ui/app/assets/data/resource/service_status.json
index 59dd8db..861f7cd 100644
--- a/contrib/views/slider/src/main/resources/ui/app/assets/data/resource/service_status.json
+++ b/contrib/views/slider/src/main/resources/ui/app/assets/data/resource/service_status.json
@@ -27,12 +27,6 @@
     },
     {
       "ServiceInfo" : {
-        "service_name" : "HCATALOG",
-        "state" : "INSTALLED"
-      }
-    },
-    {
-      "ServiceInfo" : {
         "service_name" : "HDFS",
         "state" : "STARTED"
       }
@@ -87,12 +81,6 @@
     },
     {
       "ServiceInfo" : {
-        "service_name" : "WEBHCAT",
-        "state" : "STARTED"
-      }
-    },
-    {
-      "ServiceInfo" : {
         "service_name" : "YARN",
         "state" : "STARTED"
       }


[27/27] git commit: Merge branch 'trunk' into branch-alerts-dev

Posted by jo...@apache.org.
Merge branch 'trunk' into branch-alerts-dev

Conflicts:
	ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
	ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/17b8e799
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/17b8e799
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/17b8e799

Branch: refs/heads/branch-alerts-dev
Commit: 17b8e799048b96c7fb1074a21c52e2e7ecaaf607
Parents: 760bedf bd04a28
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Tue Sep 16 08:50:14 2014 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Tue Sep 16 08:50:14 2014 -0400

----------------------------------------------------------------------
 .../ui/admin-web/app/views/leftNavbar.html      |   2 +-
 ambari-agent/src/main/package/rpm/preremove.sh  |   2 +-
 .../ambari_agent/TestActualConfigHandler.py     |   6 +-
 .../test/python/ambari_agent/TestLiveStatus.py  |   4 +-
 .../TestExecuteHadoopResource.py                |  37 ++-
 ambari-server/docs/api/v1/services.md           |   7 -
 .../stackadvisor/StackAdvisorRunner.java        |  10 +-
 .../server/controller/AmbariHandlerList.java    |  33 +-
 .../AmbariPrivilegeResourceProvider.java        |   2 +-
 .../internal/BaseBlueprintProcessor.java        |   2 +-
 .../BlueprintConfigurationProcessor.java        |  30 +-
 .../RecommendationResourceProvider.java         |   6 +-
 .../internal/ValidationResourceProvider.java    |   6 +-
 .../internal/ViewInstanceResourceProvider.java  |  10 +-
 .../ViewPermissionResourceProvider.java         |   4 +-
 .../internal/ViewPrivilegeResourceProvider.java |   8 +-
 .../ambari/server/metadata/ActionMetadata.java  |   1 -
 .../ambari/server/orm/entities/ViewEntity.java  |   8 +-
 .../org/apache/ambari/server/state/Service.java |   8 +-
 .../server/upgrade/UpgradeCatalog170.java       | 142 +++++++-
 .../apache/ambari/server/view/ViewRegistry.java |  10 +-
 ambari-server/src/main/package/rpm/preremove.sh |   2 +-
 .../custom_actions/validate_configs.py          |   2 -
 .../scripts/shared_initialization.py            |   3 +-
 .../stacks/HDP/1.3.2/role_command_order.json    |   4 +-
 .../services/HIVE/configuration/webhcat-env.xml |  54 +++
 .../HIVE/configuration/webhcat-site.xml         | 156 +++++++++
 .../stacks/HDP/1.3.2/services/HIVE/metainfo.xml | 154 +++++----
 .../HIVE/package/files/templetonSmoke.sh        |  96 ++++++
 .../services/HIVE/package/scripts/params.py     |  31 +-
 .../HIVE/package/scripts/service_check.py       |   2 +
 .../HIVE/package/scripts/status_params.py       |   3 +
 .../services/HIVE/package/scripts/webhcat.py    | 107 ++++++
 .../HIVE/package/scripts/webhcat_server.py      |  53 +++
 .../HIVE/package/scripts/webhcat_service.py     |  40 +++
 .../package/scripts/webhcat_service_check.py    |  42 +++
 .../HDP/1.3.2/services/NAGIOS/metainfo.xml      |   2 +-
 .../templates/hadoop-servicegroups.cfg.j2       |   6 -
 .../package/templates/hadoop-services.cfg.j2    |   2 +-
 .../WEBHCAT/configuration/webhcat-env.xml       |  54 ---
 .../WEBHCAT/configuration/webhcat-site.xml      | 156 ---------
 .../HDP/1.3.2/services/WEBHCAT/metainfo.xml     | 103 ------
 .../WEBHCAT/package/files/templetonSmoke.sh     |  96 ------
 .../WEBHCAT/package/scripts/__init__.py         |  20 --
 .../services/WEBHCAT/package/scripts/params.py  |  78 -----
 .../WEBHCAT/package/scripts/service_check.py    |  44 ---
 .../WEBHCAT/package/scripts/status_params.py    |  26 --
 .../services/WEBHCAT/package/scripts/webhcat.py | 107 ------
 .../WEBHCAT/package/scripts/webhcat_server.py   |  53 ---
 .../WEBHCAT/package/scripts/webhcat_service.py  |  40 ---
 .../stacks/HDP/1.3/role_command_order.json      |   4 +-
 .../stacks/HDP/1.3/services/HIVE/metainfo.xml   |   5 -
 .../services/HDFS/configuration/core-site.xml   |   9 +
 .../scripts/shared_initialization.py            |   3 +-
 .../stacks/HDP/2.0.6/role_command_order.json    |   4 +-
 .../services/HDFS/configuration/core-site.xml   |  10 +
 .../services/HIVE/configuration/webhcat-env.xml |  54 +++
 .../HIVE/configuration/webhcat-site.xml         | 138 ++++++++
 .../stacks/HDP/2.0.6/services/HIVE/metainfo.xml | 165 +++++-----
 .../HIVE/package/files/templetonSmoke.sh        |  96 ++++++
 .../services/HIVE/package/scripts/params.py     |  44 ++-
 .../HIVE/package/scripts/service_check.py       |   2 +
 .../HIVE/package/scripts/status_params.py       |   1 +
 .../services/HIVE/package/scripts/webhcat.py    | 112 +++++++
 .../HIVE/package/scripts/webhcat_server.py      |  53 +++
 .../HIVE/package/scripts/webhcat_service.py     |  40 +++
 .../package/scripts/webhcat_service_check.py    |  41 +++
 .../HDP/2.0.6/services/NAGIOS/metainfo.xml      |   2 +-
 .../templates/hadoop-servicegroups.cfg.j2       |   8 +-
 .../package/templates/hadoop-services.cfg.j2    |   2 +-
 .../services/OOZIE/package/scripts/params.py    |   3 +-
 .../services/SQOOP/package/scripts/params.py    |  12 +-
 .../WEBHCAT/configuration/webhcat-env.xml       |  54 ---
 .../WEBHCAT/configuration/webhcat-site.xml      | 138 --------
 .../HDP/2.0.6/services/WEBHCAT/metainfo.xml     | 110 -------
 .../WEBHCAT/package/files/templetonSmoke.sh     |  96 ------
 .../WEBHCAT/package/scripts/__init__.py         |  20 --
 .../services/WEBHCAT/package/scripts/params.py  | 102 ------
 .../WEBHCAT/package/scripts/service_check.py    |  45 ---
 .../WEBHCAT/package/scripts/status_params.py    |  26 --
 .../services/WEBHCAT/package/scripts/webhcat.py | 112 -------
 .../WEBHCAT/package/scripts/webhcat_server.py   |  53 ---
 .../WEBHCAT/package/scripts/webhcat_service.py  |  40 ---
 .../HDP/2.1.GlusterFS/role_command_order.json   |   4 +-
 .../2.1.GlusterFS/services/HIVE/metainfo.xml    | 100 +++---
 .../WEBHCAT/configuration/webhcat-site.xml      | 143 --------
 .../2.1.GlusterFS/services/WEBHCAT/metainfo.xml |  46 ---
 .../stacks/HDP/2.1/role_command_order.json      |   3 +-
 .../services/FALCON/package/scripts/params.py   |   9 +-
 .../stacks/HDP/2.1/services/HIVE/metainfo.xml   |  62 +++-
 .../services/STORM/configuration/storm-env.xml  |   2 +-
 .../WEBHCAT/configuration/webhcat-site.xml      | 143 --------
 .../HDP/2.1/services/WEBHCAT/metainfo.xml       |  47 ---
 .../stacks/HDP/2.2.1/services/HIVE/metainfo.xml |   4 -
 .../HDP/2.2.1/services/WEBHCAT/metainfo.xml     |  26 --
 .../resources/stacks/HDP/2.2/repos/repoinfo.xml |  27 +-
 .../stacks/HDP/2.2/services/FALCON/metainfo.xml |  12 +-
 .../stacks/HDP/2.2/services/FLUME/metainfo.xml  |   2 +-
 .../stacks/HDP/2.2/services/HBASE/metainfo.xml  |   2 +-
 .../services/HDFS/configuration/hadoop-env.xml  |   2 +-
 .../services/HDFS/configuration/hdfs-site.xml   |   2 +-
 .../stacks/HDP/2.2/services/HDFS/metainfo.xml   |   4 +-
 .../HIVE/configuration/webhcat-site.xml         |  59 ++++
 .../stacks/HDP/2.2/services/HIVE/metainfo.xml   |  48 ++-
 .../services/OOZIE/configuration/oozie-site.xml |   2 +-
 .../stacks/HDP/2.2/services/OOZIE/metainfo.xml  |  27 +-
 .../stacks/HDP/2.2/services/PIG/metainfo.xml    |   5 +-
 .../stacks/HDP/2.2/services/SQOOP/metainfo.xml  |  15 +-
 .../stacks/HDP/2.2/services/STORM/metainfo.xml  |  12 +-
 .../stacks/HDP/2.2/services/TEZ/metainfo.xml    |   2 +-
 .../WEBHCAT/configuration/webhcat-site.xml      |  59 ----
 .../HDP/2.2/services/WEBHCAT/metainfo.xml       |   2 +-
 .../YARN/configuration-mapred/mapred-site.xml   |   2 +-
 .../services/YARN/configuration/yarn-site.xml   |   2 +-
 .../stacks/HDP/2.2/services/YARN/metainfo.xml   |   6 +-
 .../HDP/2.2/services/ZOOKEEPER/metainfo.xml     |   2 +-
 .../api/util/StackExtensionHelperTest.java      |  29 +-
 .../AmbariManagementControllerTest.java         |   6 +-
 .../AmbariPrivilegeResourceProviderTest.java    |   2 +-
 .../internal/BaseBlueprintProcessorTest.java    |  27 +-
 .../BlueprintConfigurationProcessorTest.java    |  21 ++
 .../ViewInstanceResourceProviderTest.java       |  10 +-
 .../ViewPermissionResourceProviderTest.java     |   4 +-
 .../ViewPrivilegeResourceProviderTest.java      |   2 +-
 .../server/orm/entities/ViewEntityTest.java     |  22 +-
 .../server/upgrade/UpgradeCatalog170Test.java   | 241 ++++++++++----
 .../ambari/server/view/ViewRegistryTest.java    |  28 +-
 .../1.3.2/HIVE/test_hive_service_check.py       |  20 ++
 .../stacks/1.3.2/HIVE/test_webhcat_server.py    | 258 +++++++++++++++
 .../stacks/1.3.2/WEBHCAT/test_webhcat_server.py | 258 ---------------
 .../1.3.2/WEBHCAT/test_webhcat_service_check.py |  61 ----
 .../hooks/before-START/test_before_start.py     |   6 +-
 .../2.0.6/HIVE/test_hive_service_check.py       |  21 ++
 .../stacks/2.0.6/HIVE/test_webhcat_server.py    | 270 +++++++++++++++
 .../stacks/2.0.6/WEBHCAT/test_webhcat_server.py | 273 ---------------
 .../2.0.6/WEBHCAT/test_webhcat_service_check.py |  61 ----
 .../hooks/before-START/test_before_start.py     |   6 +-
 .../stacks/HDP/2.0.5/services/HIVE/metainfo.xml |  74 +++++
 .../WEBHCAT/configuration/webhcat-site.xml      | 126 -------
 .../HDP/2.0.5/services/WEBHCAT/metainfo.xml     | 102 ------
 .../HDP/2.0.6/services/WEBHCAT/metainfo.xml     |  28 --
 .../org/apache/ambari/view/ViewDefinition.java  |   8 +-
 ambari-web/app/assets/data/alerts/alerts.json   |   2 +-
 .../app/assets/data/dashboard/services.json     | 117 -------
 .../app/assets/data/hosts/HDP2/hosts.json       |  11 +-
 .../data/services/HDP2/components_state.json    |   9 -
 .../app/assets/data/services/HDP2/services.json |  16 -
 .../services/host_component_actual_configs.json | 118 +------
 .../data/stacks/HDP-2.1/recommendations.json    |   2 +-
 .../stacks/HDP-2.1/recommendations_configs.json |   2 +-
 .../data/stacks/HDP-2.1/service_components.json | 218 +++++-------
 .../data/wizard/stack/hdp/version/1.2.0.json    |  18 -
 .../data/wizard/stack/hdp/version/1.2.1.json    |  11 -
 .../data/wizard/stack/hdp/version/1.3.0.json    |  11 -
 .../data/wizard/stack/hdp/version/1.3.1.json    |  11 -
 .../data/wizard/stack/hdp/version/2.0.1.json    |  11 -
 .../data/wizard/stack/hdp/version/2.0.5.json    |  11 -
 .../data/wizard/stack/hdp/version0.1.json       |  17 -
 .../wizard/stack/hdp/version01/HCATALOG.json    |  20 --
 .../wizard/stack/hdp/version1.2.1/HCATALOG.json |   4 -
 .../wizard/stack/hdp/version1.3.0/HCATALOG.json |   4 -
 .../wizard/stack/hdp/version131/HCATALOG.json   |   4 -
 .../wizard/stack/hdp/version2.0.1/HCATALOG.json |   4 -
 .../app/assets/data/wizard/stack/stacks.json    |  55 ----
 .../main/admin/security/add/step2.js            |   4 +-
 .../main/admin/serviceAccounts_controller.js    |   2 +-
 .../controllers/main/service/info/configs.js    | 130 +++++---
 ambari-web/app/controllers/main/service/item.js |   2 +-
 .../app/controllers/wizard/step5_controller.js  |  92 ++++--
 .../app/controllers/wizard/step6_controller.js  |  31 +-
 .../app/controllers/wizard/step7_controller.js  |  18 +-
 .../app/controllers/wizard/step8_controller.js  |   3 +-
 ambari-web/app/data/HDP2/secure_configs.js      |  15 +-
 ambari-web/app/data/HDP2/secure_mapping.js      |  12 +-
 ambari-web/app/data/HDP2/secure_properties.js   |   8 +-
 ambari-web/app/data/HDP2/site_properties.js     |  22 +-
 ambari-web/app/data/secure_configs.js           |  15 +-
 ambari-web/app/data/secure_mapping.js           |  12 +-
 ambari-web/app/data/secure_properties.js        |   8 +-
 ambari-web/app/data/site_properties.js          |  22 +-
 ambari-web/app/mixins/common/serverValidator.js |   8 +-
 .../app/mixins/wizard/addSecurityConfigs.js     |   2 +-
 ambari-web/app/models/service.js                |  12 -
 ambari-web/app/models/stack_service.js          |  15 +-
 ambari-web/app/styles/application.less          |  14 +
 ambari-web/app/templates/application.hbs        |   2 +-
 .../app/templates/main/service/info/configs.hbs |   2 +-
 .../app/templates/main/service/info/summary.hbs |  26 +-
 ambari-web/app/utils/ajax/ajax.js               |   9 +
 .../app/utils/batch_scheduled_requests.js       |   1 -
 ambari-web/app/utils/config.js                  |   2 +-
 ambari-web/app/utils/helper.js                  |   1 -
 .../views/common/configs/config_history_flow.js |   2 +-
 .../app/views/common/quick_view_link_view.js    |   6 +-
 .../app/views/main/host/configs_service_menu.js |   2 +-
 .../app/views/main/service/info/summary.js      |  64 +++-
 ambari-web/app/views/main/service/item.js       |   7 +-
 ambari-web/app/views/main/service/menu.js       |   8 +-
 .../security/add/addSecurity_controller_test.js |   2 -
 .../main/service/info/config_test.js            |   8 +-
 .../test/controllers/wizard/step4_test.js       |  34 +-
 .../test/controllers/wizard/step8_test.js       |  14 +-
 ambari-web/test/models/service_test.js          |   8 -
 ambari-web/test/models/stack_service_test.js    |   8 -
 ambari-web/test/service_components.js           | 229 +++++--------
 ambari-web/test/utils/helper_test.js            |   1 -
 .../src/addOns/nagios/scripts/nagios_alerts.php |   8 +-
 .../apache/ambari/msi/ClusterDefinition.java    |   2 +-
 .../ambari/msi/ClusterDefinitionTest.java       |   4 -
 .../Scripts/HostComponentsDiscovery.ps1         |   3 +-
 .../slider-core/0.41.0/slider-core-0.41.0.jar   | Bin 1125710 -> 0 bytes
 .../slider-core/0.41.0/slider-core-0.41.0.pom   |  25 --
 .../slider-core/0.51.0/slider-core-0.51.0.jar   | Bin 0 -> 1144236 bytes
 .../slider-core/0.51.0/slider-core-0.51.0.pom   |  25 ++
 .../slider/slider-core/maven-metadata-local.xml |   6 +-
 contrib/views/slider/lib/slider-agent.tar.gz    | Bin 0 -> 480985 bytes
 contrib/views/slider/pom.xml                    |  10 +-
 .../view/slider/SliderAppsResourceProvider.java |  15 +-
 .../view/slider/SliderAppsViewController.java   |   9 +-
 .../slider/SliderAppsViewControllerImpl.java    | 329 +++++++++++--------
 .../view/slider/rest/SliderAppsResource.java    |   2 +-
 .../ambari/view/slider/rest/client/Metric.java  |   3 +
 .../assets/data/resource/service_status.json    |  12 -
 .../main/resources/ui/app/models/slider_app.js  |   2 +-
 .../resources/ui/app/styles/application.less    |   1 -
 225 files changed, 3669 insertions(+), 4671 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/17b8e799/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
index 4ee2ac2,75635cc..435dd37
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
@@@ -18,75 -18,30 +18,88 @@@
  
  package org.apache.ambari.server.upgrade;
  
 -import com.google.common.reflect.TypeToken;
 -import com.google.inject.Inject;
 -import com.google.inject.Injector;
 +import java.lang.reflect.Type;
- 
 +import java.sql.Connection;
 +import java.sql.PreparedStatement;
 +import java.sql.ResultSet;
 +import java.sql.SQLException;
 +import java.util.ArrayList;
 +import java.util.Collections;
++import java.util.Date;
 +import java.util.HashMap;
 +import java.util.HashSet;
++import java.util.Iterator;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.Map.Entry;
 +import java.util.Set;
- import java.util.Date;
 +
 +import javax.persistence.EntityManager;
 +import javax.persistence.TypedQuery;
 +import javax.persistence.criteria.CriteriaBuilder;
 +import javax.persistence.criteria.CriteriaQuery;
 +import javax.persistence.criteria.Expression;
 +import javax.persistence.criteria.Predicate;
 +import javax.persistence.criteria.Root;
 +
- import com.google.common.reflect.TypeToken;
  import org.apache.ambari.server.AmbariException;
  import org.apache.ambari.server.configuration.Configuration;
  import org.apache.ambari.server.controller.AmbariManagementController;
  import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
 -import org.apache.ambari.server.orm.dao.*;
 -import org.apache.ambari.server.orm.entities.*;
 -import org.apache.ambari.server.state.*;
 +import org.apache.ambari.server.orm.dao.ClusterDAO;
++import org.apache.ambari.server.orm.dao.ClusterServiceDAO;
++import org.apache.ambari.server.orm.dao.ConfigGroupConfigMappingDAO;
 +import org.apache.ambari.server.orm.dao.DaoUtils;
++import org.apache.ambari.server.orm.dao.HostComponentDesiredStateDAO;
++import org.apache.ambari.server.orm.dao.HostComponentStateDAO;
 +import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 +import org.apache.ambari.server.orm.dao.KeyValueDAO;
 +import org.apache.ambari.server.orm.dao.PermissionDAO;
 +import org.apache.ambari.server.orm.dao.PrincipalDAO;
 +import org.apache.ambari.server.orm.dao.PrincipalTypeDAO;
 +import org.apache.ambari.server.orm.dao.PrivilegeDAO;
 +import org.apache.ambari.server.orm.dao.ResourceDAO;
 +import org.apache.ambari.server.orm.dao.ResourceTypeDAO;
- import org.apache.ambari.server.orm.dao.ConfigGroupConfigMappingDAO;
++import org.apache.ambari.server.orm.dao.ServiceComponentDesiredStateDAO;
++import org.apache.ambari.server.orm.dao.ServiceDesiredStateDAO;
 +import org.apache.ambari.server.orm.dao.UserDAO;
 +import org.apache.ambari.server.orm.dao.ViewDAO;
 +import org.apache.ambari.server.orm.dao.ViewInstanceDAO;
++import org.apache.ambari.server.orm.entities.ClusterConfigEntity;
 +import org.apache.ambari.server.orm.entities.ClusterEntity;
- import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
++import org.apache.ambari.server.orm.entities.ClusterServiceEntity;
++import org.apache.ambari.server.orm.entities.ClusterServiceEntityPK;
 +import org.apache.ambari.server.orm.entities.ConfigGroupConfigMappingEntity;
- import org.apache.ambari.server.orm.entities.ClusterConfigEntity;
++import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntity;
++import org.apache.ambari.server.orm.entities.HostComponentStateEntity;
++import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
 +import org.apache.ambari.server.orm.entities.HostRoleCommandEntity_;
 +import org.apache.ambari.server.orm.entities.KeyValueEntity;
 +import org.apache.ambari.server.orm.entities.PermissionEntity;
 +import org.apache.ambari.server.orm.entities.PrincipalEntity;
 +import org.apache.ambari.server.orm.entities.PrincipalTypeEntity;
 +import org.apache.ambari.server.orm.entities.PrivilegeEntity;
 +import org.apache.ambari.server.orm.entities.ResourceEntity;
 +import org.apache.ambari.server.orm.entities.ResourceTypeEntity;
++import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
++import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK;
++import org.apache.ambari.server.orm.entities.ServiceDesiredStateEntity;
++import org.apache.ambari.server.orm.entities.ServiceDesiredStateEntityPK;
 +import org.apache.ambari.server.orm.entities.UserEntity;
 +import org.apache.ambari.server.orm.entities.ViewEntity;
 +import org.apache.ambari.server.orm.entities.ViewInstanceEntity;
 +import org.apache.ambari.server.state.Cluster;
 +import org.apache.ambari.server.state.Clusters;
 +import org.apache.ambari.server.state.Config;
 +import org.apache.ambari.server.state.ConfigHelper;
 +import org.apache.ambari.server.state.alert.Scope;
  import org.apache.ambari.server.utils.StageUtils;
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  
 -import javax.persistence.EntityManager;
 -import javax.persistence.TypedQuery;
 -import javax.persistence.criteria.*;
 -import java.lang.reflect.Type;
 -import java.sql.Connection;
 -import java.sql.PreparedStatement;
 -import java.sql.ResultSet;
 -import java.sql.SQLException;
 -import java.util.*;
 -import java.util.Map.Entry;
++import com.google.common.reflect.TypeToken;
 +import com.google.inject.Inject;
 +import com.google.inject.Injector;
  
  /**
   * Upgrade catalog for version 1.7.0.
@@@ -598,6 -556,117 +614,118 @@@ public class UpgradeCatalog170 extends 
      moveConfigGroupsGlobalToEnv();
    }
  
+   public void moveHcatalogIntoHiveService() throws AmbariException {
+     final String serviceName = "HIVE";
+     final String serviceNameToBeDeleted = "HCATALOG";
+     final String componentName = "HCAT";
+     moveComponentsIntoService(serviceName, serviceNameToBeDeleted, componentName);
+   }
+ 
+   private void moveWebHcatIntoHiveService() throws AmbariException {
+     final String serviceName = "HIVE";
+     final String serviceNameToBeDeleted = "WEBHCAT";
+     final String componentName = "WEBHCAT_SERVER";
+     moveComponentsIntoService(serviceName, serviceNameToBeDeleted, componentName);
+   }
+ 
+   private void moveComponentsIntoService(String serviceName, String serviceNameToBeDeleted, String componentName) throws AmbariException {
+     /**
+      * Using the HCATALOG/HCAT case as the example:
+      * 1. Add a servicecomponentdesiredstate entry for HCAT under HIVE.
+      * 2. Update hostcomponentdesiredstate: set service_name to HIVE where it is HCATALOG.
+      * 3. Update hostcomponentstate: set service_name to HIVE where it is HCATALOG.
+      * 4. Delete the servicecomponentdesiredstate row where component_name is HCAT and service_name is HCATALOG.
+      * 5. Delete the servicedesiredstate row where service_name is HCATALOG.
+      * 6. Delete the clusterservices row where service_name is HCATALOG.
+      */
+     ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
+     ClusterServiceDAO clusterServiceDAO = injector.getInstance(ClusterServiceDAO.class);
+     ServiceDesiredStateDAO serviceDesiredStateDAO = injector.getInstance(ServiceDesiredStateDAO.class);
+     ServiceComponentDesiredStateDAO serviceComponentDesiredStateDAO = injector.getInstance(ServiceComponentDesiredStateDAO.class);
+     HostComponentDesiredStateDAO hostComponentDesiredStateDAO = injector.getInstance(HostComponentDesiredStateDAO.class);
+     HostComponentStateDAO hostComponentStateDAO = injector.getInstance(HostComponentStateDAO.class);
+ 
+     List<ClusterEntity> clusterEntities = clusterDAO.findAll();
+     for (final ClusterEntity clusterEntity : clusterEntities) {
+       ServiceComponentDesiredStateEntityPK pkHCATInHcatalog = new ServiceComponentDesiredStateEntityPK();
+       pkHCATInHcatalog.setComponentName(componentName);
+       pkHCATInHcatalog.setClusterId(clusterEntity.getClusterId());
+       pkHCATInHcatalog.setServiceName(serviceNameToBeDeleted);
+       ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntityToDelete = serviceComponentDesiredStateDAO.findByPK(pkHCATInHcatalog);
+ 
 -      if (serviceComponentDesiredStateEntityToDelete == null)
++      if (serviceComponentDesiredStateEntityToDelete == null) {
+         continue;
++      }
+ 
+       ServiceDesiredStateEntityPK serviceDesiredStateEntityPK = new ServiceDesiredStateEntityPK();
+       serviceDesiredStateEntityPK.setClusterId(clusterEntity.getClusterId());
+       serviceDesiredStateEntityPK.setServiceName(serviceNameToBeDeleted);
+       ServiceDesiredStateEntity serviceDesiredStateEntity = serviceDesiredStateDAO.findByPK(serviceDesiredStateEntityPK);
+ 
+       ClusterServiceEntityPK clusterServiceEntityToBeDeletedPK = new ClusterServiceEntityPK();
+       clusterServiceEntityToBeDeletedPK.setClusterId(clusterEntity.getClusterId());
+       clusterServiceEntityToBeDeletedPK.setServiceName(serviceNameToBeDeleted);
+       ClusterServiceEntity clusterServiceEntityToBeDeleted = clusterServiceDAO.findByPK(clusterServiceEntityToBeDeletedPK);
+ 
+       ClusterServiceEntityPK clusterServiceEntityPK = new ClusterServiceEntityPK();
+       clusterServiceEntityPK.setClusterId(clusterEntity.getClusterId());
+       clusterServiceEntityPK.setServiceName(serviceName);
+ 
+ 
+       ClusterServiceEntity clusterServiceEntity = clusterServiceDAO.findByPK(clusterServiceEntityPK);
+ 
+       ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntity = new ServiceComponentDesiredStateEntity();
+       serviceComponentDesiredStateEntity.setServiceName(serviceName);
+       serviceComponentDesiredStateEntity.setComponentName(serviceComponentDesiredStateEntityToDelete.getComponentName());
+       serviceComponentDesiredStateEntity.setClusterId(clusterEntity.getClusterId());
+       serviceComponentDesiredStateEntity.setDesiredStackVersion(serviceComponentDesiredStateEntityToDelete.getDesiredStackVersion());
+       serviceComponentDesiredStateEntity.setDesiredState(serviceComponentDesiredStateEntityToDelete.getDesiredState());
+       serviceComponentDesiredStateEntity.setClusterServiceEntity(clusterServiceEntity);
+       //serviceComponentDesiredStateDAO.create(serviceComponentDesiredStateEntity);
+ 
+       Iterator<HostComponentDesiredStateEntity> hostComponentDesiredStateIterator = serviceComponentDesiredStateEntityToDelete.getHostComponentDesiredStateEntities().iterator();
+       Iterator<HostComponentStateEntity> hostComponentStateIterator = serviceComponentDesiredStateEntityToDelete.getHostComponentStateEntities().iterator();
+ 
+       while (hostComponentDesiredStateIterator.hasNext()) {
+         HostComponentDesiredStateEntity hcDesiredStateEntityToBeDeleted = hostComponentDesiredStateIterator.next();
+         HostComponentDesiredStateEntity hostComponentDesiredStateEntity = new HostComponentDesiredStateEntity();
+         hostComponentDesiredStateEntity.setClusterId(clusterEntity.getClusterId());
+         hostComponentDesiredStateEntity.setComponentName(hcDesiredStateEntityToBeDeleted.getComponentName());
+         hostComponentDesiredStateEntity.setDesiredStackVersion(hcDesiredStateEntityToBeDeleted.getDesiredStackVersion());
+         hostComponentDesiredStateEntity.setDesiredState(hcDesiredStateEntityToBeDeleted.getDesiredState());
+         hostComponentDesiredStateEntity.setHostName(hcDesiredStateEntityToBeDeleted.getHostName());
+         hostComponentDesiredStateEntity.setHostEntity(hcDesiredStateEntityToBeDeleted.getHostEntity());
+         hostComponentDesiredStateEntity.setAdminState(hcDesiredStateEntityToBeDeleted.getAdminState());
+         hostComponentDesiredStateEntity.setMaintenanceState(hcDesiredStateEntityToBeDeleted.getMaintenanceState());
+         hostComponentDesiredStateEntity.setRestartRequired(hcDesiredStateEntityToBeDeleted.isRestartRequired());
+         hostComponentDesiredStateEntity.setServiceName(serviceName);
+         hostComponentDesiredStateEntity.setServiceComponentDesiredStateEntity(serviceComponentDesiredStateEntity);
+         hostComponentDesiredStateDAO.merge(hostComponentDesiredStateEntity);
+         hostComponentDesiredStateDAO.remove(hcDesiredStateEntityToBeDeleted);
+       }
+ 
+       while (hostComponentStateIterator.hasNext()) {
+         HostComponentStateEntity hcStateToBeDeleted = hostComponentStateIterator.next();
+         HostComponentStateEntity hostComponentStateEntity = new HostComponentStateEntity();
+         hostComponentStateEntity.setClusterId(clusterEntity.getClusterId());
+         hostComponentStateEntity.setComponentName(hcStateToBeDeleted.getComponentName());
+         hostComponentStateEntity.setCurrentStackVersion(hcStateToBeDeleted.getCurrentStackVersion());
+         hostComponentStateEntity.setCurrentState(hcStateToBeDeleted.getCurrentState());
+         hostComponentStateEntity.setHostName(hcStateToBeDeleted.getHostName());
+         hostComponentStateEntity.setHostEntity(hcStateToBeDeleted.getHostEntity());
+         hostComponentStateEntity.setServiceName(serviceName);
+         hostComponentStateEntity.setServiceComponentDesiredStateEntity(serviceComponentDesiredStateEntity);
+         hostComponentStateDAO.merge(hostComponentStateEntity);
+         hostComponentStateDAO.remove(hcStateToBeDeleted);
+       }
+       serviceComponentDesiredStateEntity.setClusterServiceEntity(clusterServiceEntity);
+       serviceComponentDesiredStateDAO.merge(serviceComponentDesiredStateEntity);
+       serviceComponentDesiredStateDAO.remove(serviceComponentDesiredStateEntityToDelete);
+       serviceDesiredStateDAO.remove(serviceDesiredStateEntity);
+       clusterServiceDAO.remove(clusterServiceEntityToBeDeleted);
+     }
+   }
+ 
+ 
    private void moveConfigGroupsGlobalToEnv() throws AmbariException {
      final ConfigGroupConfigMappingDAO confGroupConfMappingDAO = injector.getInstance(ConfigGroupConfigMappingDAO.class);
      ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
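
moveComponentsIntoService() is the generic worker here:
moveHcatalogIntoHiveService() relocates HCAT out of HCATALOG and
moveWebHcatIntoHiveService() relocates WEBHCAT_SERVER out of WEBHCAT, each
recreating the desired-state and live-state rows under HIVE before removing
the old service's rows. A hedged sketch of how the two movers would be wired
into the upgrade (the surrounding method is an assumption, not the actual
file contents):

    // Sketch (assumed wiring, not the actual file contents): the 1.7.0
    // catalog would invoke both movers during its DML-update phase.
    public class UpgradeWiringSketch {

        void executeDMLUpdates() throws Exception {
            moveHcatalogIntoHiveService();   // HCAT: HCATALOG -> HIVE
            moveWebHcatIntoHiveService();    // WEBHCAT_SERVER: WEBHCAT -> HIVE
            moveConfigGroupsGlobalToEnv();   // config-group migration shown above
        }

        // Stubs standing in for the real UpgradeCatalog170 methods.
        void moveHcatalogIntoHiveService() {}
        void moveWebHcatIntoHiveService() {}
        void moveConfigGroupsGlobalToEnv() {}

        public static void main(String[] args) throws Exception {
            new UpgradeWiringSketch().executeDMLUpdates();
        }
    }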

http://git-wip-us.apache.org/repos/asf/ambari/blob/17b8e799/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --cc ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 6c868b4,8a3e270..8966fc0
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@@ -3131,7 -3059,7 +3131,7 @@@ public class AmbariManagementController
      Assert.assertNull(stage1.getExecutionCommandWrapper(host2, "DATANODE"));
      Assert.assertNotNull(stage3.getExecutionCommandWrapper(host1, "HBASE_SERVICE_CHECK"));
      Assert.assertNotNull(stage2.getExecutionCommandWrapper(host2, "HDFS_SERVICE_CHECK"));
-- 
++
      Type type = new TypeToken<Map<String, String>>() {}.getType();
  
  
@@@ -6516,7 -6442,7 +6516,7 @@@
          }
        }
      }
-- 
++
      Type type = new TypeToken<Map<String, String>>(){}.getType();
      for (Stage stage : actionDB.getAllStages(requestId)){
        Map<String, String> hostParamsStage = StageUtils.getGson().fromJson(stage.getHostParamsStage(), type);

http://git-wip-us.apache.org/repos/asf/ambari/blob/17b8e799/ambari-web/app/utils/ajax/ajax.js
----------------------------------------------------------------------


[02/27] git commit: AMBARI-7270. FE: Errors in /recommendation API being silently ignored

Posted by jo...@apache.org.
AMBARI-7270. FE: Errors in /recommendation API being silently ignored


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a96b3f8f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a96b3f8f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a96b3f8f

Branch: refs/heads/branch-alerts-dev
Commit: a96b3f8fec14bef2b14a729c9e2a9617f757677e
Parents: 1dcb9dc
Author: Srimanth Gunturi <sg...@hortonworks.com>
Authored: Fri Sep 12 17:41:26 2014 -0700
Committer: Srimanth Gunturi <sg...@hortonworks.com>
Committed: Fri Sep 12 19:24:45 2014 -0700

----------------------------------------------------------------------
 .../stackadvisor/StackAdvisorRunner.java        | 10 +++-
 .../RecommendationResourceProvider.java         |  6 +--
 .../internal/ValidationResourceProvider.java    |  6 +--
 .../app/controllers/wizard/step5_controller.js  | 54 ++++++++++++--------
 .../app/controllers/wizard/step6_controller.js  | 31 +++++++----
 .../app/controllers/wizard/step7_controller.js  | 15 +++++-
 ambari-web/app/mixins/common/serverValidator.js |  8 +--
 7 files changed, 84 insertions(+), 46 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a96b3f8f/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRunner.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRunner.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRunner.java
index ee7dcc2..a50f915 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRunner.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRunner.java
@@ -75,7 +75,15 @@ public class StackAdvisorRunner {
         if (exitCode > 0) {
           String errorMessage;
           if (errMessage != null) {
-            errorMessage = errMessage.substring(errMessage.lastIndexOf("\n"));
+            // We want to get the last line.
+            int index = errMessage.lastIndexOf("\n");
+            if (index > 0 && index == (errMessage.length() - 1)) {
+              index = errMessage.lastIndexOf("\n", index - 1); // sentence ended with newline
+            }
+            if (index > -1) {
+              errMessage = errMessage.substring(index + 1).trim();
+            }
+            errorMessage = errMessage;
           } else {
             errorMessage = "Error occurred during stack advisor execution";
           }
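
The new branch extracts the last non-empty line of stderr, stepping back over a
trailing newline so a message ending in "\n" still yields its final complete
line. A self-contained Java sketch of the same logic (class and method names
are illustrative):

    // Sketch of the last-line extraction added above: if the message ends
    // with a newline, skip it and take the final complete line instead.
    public class LastLineSketch {

        static String lastLine(String errMessage) {
            int index = errMessage.lastIndexOf("\n");
            if (index > 0 && index == (errMessage.length() - 1)) {
                index = errMessage.lastIndexOf("\n", index - 1); // message ended with newline
            }
            if (index > -1) {
                return errMessage.substring(index + 1).trim();
            }
            return errMessage;
        }

        public static void main(String[] args) {
            System.out.println(lastLine("Traceback ...\nKeyError: 'x'\n")); // KeyError: 'x'
            System.out.println(lastLine("single line"));                    // single line
        }
    }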

http://git-wip-us.apache.org/repos/asf/ambari/blob/a96b3f8f/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RecommendationResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RecommendationResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RecommendationResourceProvider.java
index b722825..40a1791 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RecommendationResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RecommendationResourceProvider.java
@@ -93,12 +93,10 @@ public class RecommendationResourceProvider extends StackAdvisorResourceProvider
       response = saHelper.recommend(recommendationRequest);
     } catch (StackAdvisorRequestException e) {
       LOG.warn("Error occured during recommnedation", e);
-      throw new WebApplicationException(Response.status(Status.BAD_REQUEST).entity(e.getMessage())
-          .build());
+      throw new IllegalArgumentException(e.getMessage(), e);
     } catch (StackAdvisorException e) {
       LOG.warn("Error occured during recommnedation", e);
-      throw new WebApplicationException(Response.status(Status.INTERNAL_SERVER_ERROR).entity(e.getMessage())
-          .build());
+      throw new SystemException(e.getMessage(), e);
     }
 
     Resource recommendation = createResources(new Command<Resource>() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/a96b3f8f/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ValidationResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ValidationResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ValidationResourceProvider.java
index 941fb19..d77cf7d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ValidationResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ValidationResourceProvider.java
@@ -85,12 +85,10 @@ public class ValidationResourceProvider extends StackAdvisorResourceProvider {
       response = saHelper.validate(validationRequest);
     } catch (StackAdvisorRequestException e) {
       LOG.warn("Error occurred during validation", e);
-      throw new WebApplicationException(Response.status(Status.BAD_REQUEST).entity(e.getMessage())
-          .build());
+      throw new IllegalArgumentException(e.getMessage(), e);
     } catch (StackAdvisorException e) {
       LOG.warn("Error occurred during validation", e);
-      throw new WebApplicationException(Response.status(Status.INTERNAL_SERVER_ERROR).entity(e.getMessage())
-          .build());
+      throw new SystemException(e.getMessage(), e);
     }
 
     Resource validation = createResources(new Command<Resource>() {

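Both providers now translate failures the same way: a bad request becomes an
IllegalArgumentException and anything else becomes a SystemException, letting
the resource framework render structured API errors instead of the bare
WebApplicationException responses that bypassed it (presumably mapped to the
same 400/500 statuses the removed code encoded; that mapping is an assumption
here). A hedged sketch of the translation, with a helper name and Callable
wrapper that are illustrative, not part of the patch:

    // Sketch only: the patch inlines this per provider. Assumes the framework
    // maps IllegalArgumentException -> HTTP 400 and SystemException -> HTTP 500.
    private <T> T translateAdvisorErrors(java.util.concurrent.Callable<T> call)
        throws SystemException {
      try {
        return call.call();
      } catch (StackAdvisorRequestException e) {
        throw new IllegalArgumentException(e.getMessage(), e);  // client error
      } catch (Exception e) {
        throw new SystemException(e.getMessage(), e);           // server fault
      }
    }
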
http://git-wip-us.apache.org/repos/asf/ambari/blob/a96b3f8f/ambari-web/app/controllers/wizard/step5_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step5_controller.js b/ambari-web/app/controllers/wizard/step5_controller.js
index e08e709..258e856 100644
--- a/ambari-web/app/controllers/wizard/step5_controller.js
+++ b/ambari-web/app/controllers/wizard/step5_controller.js
@@ -281,19 +281,13 @@ App.WizardStep5Controller = Em.Controller.extend(App.BlueprintMixin, {
         validate: 'host_groups',
         recommendations: blueprint
       },
-      success: 'updateValidationsSuccessCallback'
+      success: 'updateValidationsSuccessCallback',
+      error: 'updateValidationsErrorCallback'
     }).
-      retry({
-        times: App.maxRetries,
-        timeout: App.timeout
-      }).
       then(function() {
         if (callback) {
           callback();
         }
-      }, function () {
-        App.showReloadPopup();
-        console.log('Load validations failed');
       }
     );
   },
@@ -306,8 +300,8 @@ App.WizardStep5Controller = Em.Controller.extend(App.BlueprintMixin, {
   updateValidationsSuccessCallback: function (data) {
     var self = this;
 
-    generalErrorMessages = [];
-    generalWarningMessages = [];
+    var generalErrorMessages = [];
+    var generalWarningMessages = [];
     this.get('servicesMasters').setEach('warnMessage', null);
     this.get('servicesMasters').setEach('errorMessage', null);
     var anyErrors = false;
@@ -344,6 +338,19 @@ App.WizardStep5Controller = Em.Controller.extend(App.BlueprintMixin, {
   },
 
   /**
+   * Error-callback for validations request
+   * @param {object} jqXHR
+   * @param {object} ajaxOptions
+   * @param {string} error
+   * @param {object} opt
+   * @method updateValidationsErrorCallback
+   */
+  updateValidationsErrorCallback: function (jqXHR, ajaxOptions, error, opt) {
+    App.ajax.defaultErrorHandler(jqXHR, opt.url, opt.method, jqXHR.status);
+    console.log('Load validations failed');
+  },
+
+  /**
    * Composes the selected combobox values into a master blueprint and merges it with the currently installed slave blueprint
    */
   getCurrentBlueprint: function() {
@@ -532,20 +539,12 @@ App.WizardStep5Controller = Em.Controller.extend(App.BlueprintMixin, {
         name: 'wizard.loadrecommendations',
         sender: self,
         data: data,
-        success: 'loadRecommendationsSuccessCallback'
+        success: 'loadRecommendationsSuccessCallback',
+        error: 'loadRecommendationsErrorCallback'
       }).
-        retry({
-          times: App.maxRetries,
-          timeout: App.timeout
-        }).
         then(function () {
           callback(self.createComponentInstallationObjects(), self);
-        },
-        function () {
-          App.showReloadPopup();
-          console.log('Load recommendations failed');
-        }
-      );
+        });
     }
   },
 
@@ -648,6 +647,19 @@ App.WizardStep5Controller = Em.Controller.extend(App.BlueprintMixin, {
   },
 
   /**
+   * Error-callback for recommendations request
+   * @param {object} jqXHR
+   * @param {object} ajaxOptions
+   * @param {string} error
+   * @param {object} opt
+   * @method loadRecommendationsErrorCallback
+   */
+  loadRecommendationsErrorCallback: function (jqXHR, ajaxOptions, error, opt) {
+    App.ajax.defaultErrorHandler(jqXHR, opt.url, opt.method, jqXHR.status);
+    console.log('Load recommendations failed');
+  },
+
+  /**
    * Load services info to appropriate variable and return masterComponentHosts
    * @return {Object[]}
    */

http://git-wip-us.apache.org/repos/asf/ambari/blob/a96b3f8f/ambari-web/app/controllers/wizard/step6_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step6_controller.js b/ambari-web/app/controllers/wizard/step6_controller.js
index 3bd11c5..1142c8a 100644
--- a/ambari-web/app/controllers/wizard/step6_controller.js
+++ b/ambari-web/app/controllers/wizard/step6_controller.js
@@ -69,7 +69,7 @@ App.WizardStep6Controller = Em.Controller.extend(App.BlueprintMixin, {
    * Define state for submit button
    * @type {bool}
    */
-  submitDisabled: true,
+  submitDisabled: false,
 
   /**
    * Check if <code>addHostWizard</code> used
@@ -544,7 +544,9 @@ App.WizardStep6Controller = Em.Controller.extend(App.BlueprintMixin, {
    */
   callServerSideValidation: function (successCallback) {
     var self = this;
-    self.set('submitDisabled', true);
+
+    // We do not want to disable Next due to server validation issues, hence the commented-out line below
+    // self.set('submitDisabled', true);
 
     var selectedServices = App.StackService.find().filterProperty('isSelected').mapProperty('serviceName');
     var installedServices = App.StackService.find().filterProperty('isInstalled').mapProperty('serviceName');
@@ -593,19 +595,13 @@ App.WizardStep6Controller = Em.Controller.extend(App.BlueprintMixin, {
         validate: 'host_groups',
         recommendations: bluePrintsForValidation
       },
-      success: 'updateValidationsSuccessCallback'
+      success: 'updateValidationsSuccessCallback',
+      error: 'updateValidationsErrorCallback'
     }).
-      retry({
-        times: App.maxRetries,
-        timeout: App.timeout
-      }).
       then(function () {
         if (!self.get('submitDisabled') && successCallback) {
           successCallback();
         }
-      }, function () {
-        App.showReloadPopup();
-        console.log('Load validations failed');
       }
     );
   },
@@ -690,7 +686,20 @@ App.WizardStep6Controller = Em.Controller.extend(App.BlueprintMixin, {
 
     // use this.set('submitDisabled', anyErrors); if validation results should block the Next button
     // It's because showValidationIssuesAcceptBox lets the user accept validation issues and continue
-    this.set('submitDisabled', false);
+    // this.set('submitDisabled', false);
+  },
+
+  /**
+   * Error-callback for validations request
+   * @param {object} jqXHR
+   * @param {object} ajaxOptions
+   * @param {string} error
+   * @param {object} opt
+   * @method updateValidationsErrorCallback
+   */
+  updateValidationsErrorCallback: function (jqXHR, ajaxOptions, error, opt) {
+    App.ajax.defaultErrorHandler(jqXHR, opt.url, opt.method, jqXHR.status);
+    console.log('Load validations failed');
   },
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/a96b3f8f/ambari-web/app/controllers/wizard/step7_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step7_controller.js b/ambari-web/app/controllers/wizard/step7_controller.js
index 45647bb..414dfdd 100644
--- a/ambari-web/app/controllers/wizard/step7_controller.js
+++ b/ambari-web/app/controllers/wizard/step7_controller.js
@@ -1343,11 +1343,22 @@ App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, {
     }
     var self = this;
     this.set('submitButtonClicked', true);
-    this.serverSideValidation().done(function () {
-      self.checkDatabaseConnectionTest().done(function () {
+    this.serverSideValidation().done(function() {
+      self.checkDatabaseConnectionTest().done(function() {
         self.resolveHiveMysqlDatabase();
         self.set('submitButtonClicked', false);
       });
+    }).fail(function(value){
+      if ("invalid_configs" == value) {
+        self.set('submitButtonClicked', false);
+      } else {
+      // The validation mechanism itself failed;
+      // proceed with the other checks anyway.
+        self.checkDatabaseConnectionTest().done(function() {
+          self.resolveHiveMysqlDatabase();
+          self.set('submitButtonClicked', false);
+        });
+      }
     });
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/a96b3f8f/ambari-web/app/mixins/common/serverValidator.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/common/serverValidator.js b/ambari-web/app/mixins/common/serverValidator.js
index b520071..bea7bd7 100644
--- a/ambari-web/app/mixins/common/serverValidator.js
+++ b/ambari-web/app/mixins/common/serverValidator.js
@@ -134,7 +134,8 @@ App.ServerValidatorMixin = Em.Mixin.create({
     this.set("recommendationsConfigs", Em.get(data.resources[0] , "recommendations.blueprint.configurations"));
   },
 
-  loadRecommendationsError: function() {
+  loadRecommendationsError: function(jqXHR, ajaxOptions, error, opt) {
+    App.ajax.defaultErrorHandler(jqXHR, opt.url, opt.method, jqXHR.status);
     console.error('Load recommendations failed');
   },
 
@@ -251,8 +252,9 @@ App.ServerValidatorMixin = Em.Mixin.create({
     });
   },
 
-  validationError: function() {
+  validationError: function (jqXHR, ajaxOptions, error, opt) {
     this.set('configValidationFailed', true);
+    App.ajax.defaultErrorHandler(jqXHR, opt.url, opt.method, jqXHR.status);
     console.error('config validation failed');
   },
 
@@ -279,7 +281,7 @@ App.ServerValidatorMixin = Em.Mixin.create({
         },
         onSecondary: function () {
           this.hide();
-          deferred.reject();
+          deferred.reject("invalid_configs"); // message used to differentiate types of rejections.
         },
         bodyClass: Em.View.extend({
           controller: self,

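The "invalid_configs" marker above lets the .fail() handler in step7 tell a
user-declined validation apart from a failure of the validation service
itself, which should not block submission. A rough Java analogue of the same
rejection-tagging idea, sketched with CompletableFuture; all names are
illustrative and none of this is Ambari code:

    import java.util.concurrent.CompletableFuture;

    class RejectionTagDemo {
      static final String INVALID_CONFIGS = "invalid_configs";

      public static void main(String[] args) {
        CompletableFuture<Void> validation = new CompletableFuture<>();
        // Tag the rejection with a reason the handler can inspect.
        validation.completeExceptionally(new IllegalStateException(INVALID_CONFIGS));

        validation.exceptionally(t -> {
          if (INVALID_CONFIGS.equals(t.getMessage())) {
            System.out.println("user must fix configs; stop the submit");
          } else {
            System.out.println("validation backend failed; run remaining checks");
          }
          return null;
        });
      }
    }
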

[22/27] git commit: AMBARI-7299. Slider View: Update slider-core JAR to 0.51.0 version (srimanth)

Posted by jo...@apache.org.
AMBARI-7299. Slider View: Update slider-core JAR to 0.51.0 version (srimanth)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/26cd2f4b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/26cd2f4b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/26cd2f4b

Branch: refs/heads/branch-alerts-dev
Commit: 26cd2f4b0f1e7ed23aab9fef3e6365bc4d91094e
Parents: f84ee8d
Author: Srimanth Gunturi <sg...@hortonworks.com>
Authored: Sun Sep 14 17:20:06 2014 -0700
Committer: Srimanth Gunturi <sg...@hortonworks.com>
Committed: Mon Sep 15 15:32:20 2014 -0700

----------------------------------------------------------------------
 .../slider-core/0.41.0/slider-core-0.41.0.jar   | Bin 1125710 -> 0 bytes
 .../slider-core/0.41.0/slider-core-0.41.0.pom   |  25 --
 .../slider-core/0.51.0/slider-core-0.51.0.jar   | Bin 0 -> 1144236 bytes
 .../slider-core/0.51.0/slider-core-0.51.0.pom   |  25 ++
 .../slider/slider-core/maven-metadata-local.xml |   6 +-
 contrib/views/slider/lib/slider-agent.tar.gz    | Bin 0 -> 480985 bytes
 contrib/views/slider/pom.xml                    |   2 +-
 .../view/slider/SliderAppsResourceProvider.java |  15 +-
 .../view/slider/SliderAppsViewController.java   |   9 +-
 .../slider/SliderAppsViewControllerImpl.java    | 324 ++++++++++---------
 .../view/slider/rest/SliderAppsResource.java    |   2 +-
 .../ambari/view/slider/rest/client/Metric.java  |   3 +
 12 files changed, 228 insertions(+), 183 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/26cd2f4b/contrib/views/slider/lib/org/apache/slider/slider-core/0.41.0/slider-core-0.41.0.jar
----------------------------------------------------------------------
diff --git a/contrib/views/slider/lib/org/apache/slider/slider-core/0.41.0/slider-core-0.41.0.jar b/contrib/views/slider/lib/org/apache/slider/slider-core/0.41.0/slider-core-0.41.0.jar
deleted file mode 100644
index a6e9063..0000000
Binary files a/contrib/views/slider/lib/org/apache/slider/slider-core/0.41.0/slider-core-0.41.0.jar and /dev/null differ

http://git-wip-us.apache.org/repos/asf/ambari/blob/26cd2f4b/contrib/views/slider/lib/org/apache/slider/slider-core/0.41.0/slider-core-0.41.0.pom
----------------------------------------------------------------------
diff --git a/contrib/views/slider/lib/org/apache/slider/slider-core/0.41.0/slider-core-0.41.0.pom b/contrib/views/slider/lib/org/apache/slider/slider-core/0.41.0/slider-core-0.41.0.pom
deleted file mode 100644
index bdd574b..0000000
--- a/contrib/views/slider/lib/org/apache/slider/slider-core/0.41.0/slider-core-0.41.0.pom
+++ /dev/null
@@ -1,25 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>org.apache.slider</groupId>
-  <artifactId>slider-core</artifactId>
-  <version>0.41.0</version>
-  <description>POM was created from install:install-file</description>
-</project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/26cd2f4b/contrib/views/slider/lib/org/apache/slider/slider-core/0.51.0/slider-core-0.51.0.jar
----------------------------------------------------------------------
diff --git a/contrib/views/slider/lib/org/apache/slider/slider-core/0.51.0/slider-core-0.51.0.jar b/contrib/views/slider/lib/org/apache/slider/slider-core/0.51.0/slider-core-0.51.0.jar
new file mode 100644
index 0000000..e7d7665
Binary files /dev/null and b/contrib/views/slider/lib/org/apache/slider/slider-core/0.51.0/slider-core-0.51.0.jar differ

http://git-wip-us.apache.org/repos/asf/ambari/blob/26cd2f4b/contrib/views/slider/lib/org/apache/slider/slider-core/0.51.0/slider-core-0.51.0.pom
----------------------------------------------------------------------
diff --git a/contrib/views/slider/lib/org/apache/slider/slider-core/0.51.0/slider-core-0.51.0.pom b/contrib/views/slider/lib/org/apache/slider/slider-core/0.51.0/slider-core-0.51.0.pom
new file mode 100644
index 0000000..6168fff
--- /dev/null
+++ b/contrib/views/slider/lib/org/apache/slider/slider-core/0.51.0/slider-core-0.51.0.pom
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.slider</groupId>
+  <artifactId>slider-core</artifactId>
+  <version>0.51.0</version>
+  <description>POM was created from install:install-file</description>
+</project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/26cd2f4b/contrib/views/slider/lib/org/apache/slider/slider-core/maven-metadata-local.xml
----------------------------------------------------------------------
diff --git a/contrib/views/slider/lib/org/apache/slider/slider-core/maven-metadata-local.xml b/contrib/views/slider/lib/org/apache/slider/slider-core/maven-metadata-local.xml
index 7240a2e..eaf2be4 100644
--- a/contrib/views/slider/lib/org/apache/slider/slider-core/maven-metadata-local.xml
+++ b/contrib/views/slider/lib/org/apache/slider/slider-core/maven-metadata-local.xml
@@ -19,10 +19,10 @@
   <groupId>org.apache.slider</groupId>
   <artifactId>slider-core</artifactId>
   <versioning>
-    <release>0.41.0</release>
+    <release>0.51.0</release>
     <versions>
-      <version>0.41.0</version>
+      <version>0.51.0</version>
     </versions>
-    <lastUpdated>20140905131533</lastUpdated>
+    <lastUpdated>20140911131533</lastUpdated>
   </versioning>
 </metadata>

http://git-wip-us.apache.org/repos/asf/ambari/blob/26cd2f4b/contrib/views/slider/lib/slider-agent.tar.gz
----------------------------------------------------------------------
diff --git a/contrib/views/slider/lib/slider-agent.tar.gz b/contrib/views/slider/lib/slider-agent.tar.gz
new file mode 100644
index 0000000..d108e4b
Binary files /dev/null and b/contrib/views/slider/lib/slider-agent.tar.gz differ

http://git-wip-us.apache.org/repos/asf/ambari/blob/26cd2f4b/contrib/views/slider/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/slider/pom.xml b/contrib/views/slider/pom.xml
index ec4089f..cfb0abc 100644
--- a/contrib/views/slider/pom.xml
+++ b/contrib/views/slider/pom.xml
@@ -403,7 +403,7 @@
 		<zookeeper.version>3.4.5</zookeeper.version>
 		<jetty.version>6.1.26</jetty.version>
 		<metrics.version>3.0.1</metrics.version>
-		<slider.version>0.41.0</slider.version>
+		<slider.version>0.51.0</slider.version>
 	</properties>
 
 	<build>

http://git-wip-us.apache.org/repos/asf/ambari/blob/26cd2f4b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsResourceProvider.java
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsResourceProvider.java b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsResourceProvider.java
index 8d47769..0c0dd12 100644
--- a/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsResourceProvider.java
+++ b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsResourceProvider.java
@@ -61,7 +61,10 @@ public class SliderAppsResourceProvider implements ResourceProvider<SliderApp> {
 		} catch (IOException e) {
 			logger.warn("Unable to delete Slider app with id " + resourceId, e);
 			throw new SystemException(e.getMessage(), e);
-		}
+		} catch (InterruptedException e) {
+		    logger.warn("Unable to delete Slider app with id " + resourceId, e);
+            throw new SystemException(e.getMessage(), e);
+        }
 	}
 
 	@Override
@@ -80,7 +83,10 @@ public class SliderAppsResourceProvider implements ResourceProvider<SliderApp> {
 		} catch (IOException e) {
 			logger.warn("Unable to determine Slider app with id " + resourceId, e);
 			throw new SystemException(e.getMessage(), e);
-		}
+		} catch (InterruptedException e) {
+		    logger.warn("Unable to determine Slider app with id " + resourceId, e);
+            throw new SystemException(e.getMessage(), e);
+        }
 	}
 
 	@Override
@@ -99,7 +105,10 @@ public class SliderAppsResourceProvider implements ResourceProvider<SliderApp> {
 		} catch (IOException e) {
 			logger.warn("Unable to determine Slider apps", e);
 			throw new SystemException(e.getMessage(), e);
-		}
+		} catch (InterruptedException e) {
+		    logger.warn("Unable to determine Slider apps", e);
+            throw new SystemException(e.getMessage(), e);
+        }
 		return appSet;
 	}
 

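The duplicated catch blocks above presumably reflect a Java 6 source level;
on Java 7+ the same handling collapses into one multi-catch. A fragment
sketch mirroring the surrounding provider code (the field and method names
are assumed from context, not verified):

    try {
      appSet.addAll(sliderAppsViewController.getSliderApps(properties));
    } catch (YarnException | IOException | InterruptedException e) {  // Java 7+ multi-catch
      logger.warn("Unable to determine Slider apps", e);
      throw new SystemException(e.getMessage(), e);
    }
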
http://git-wip-us.apache.org/repos/asf/ambari/blob/26cd2f4b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewController.java
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewController.java b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewController.java
index 70e333f..93c63f2 100644
--- a/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewController.java
+++ b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewController.java
@@ -47,9 +47,10 @@ public interface SliderAppsViewController {
    * @return
    * @throws YarnException
    * @throws IOException
+   * @throws InterruptedException 
    */
   public SliderApp getSliderApp(String applicationId, Set<String> properties)
-      throws YarnException, IOException;
+      throws YarnException, IOException, InterruptedException;
 
   /**
    * Provides list of Slider apps with requested properties populated.
@@ -60,9 +61,10 @@ public interface SliderAppsViewController {
    * @return
    * @throws YarnException
    * @throws IOException
+   * @throws InterruptedException 
    */
   public List<SliderApp> getSliderApps(Set<String> properties)
-      throws YarnException, IOException;
+      throws YarnException, IOException, InterruptedException;
 
   /**
    * Attempts to delete a Slider app. An unsuccessful attempt will result in
@@ -71,9 +73,10 @@ public interface SliderAppsViewController {
    * @param applicationId
    * @throws YarnException
    * @throws IOException
+   * @throws InterruptedException 
    */
   public void deleteSliderApp(String applicationId) throws YarnException,
-      IOException;
+      IOException, InterruptedException;
 
   public SliderAppType getSliderAppType(String appTypeId, Set<String> properties);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/26cd2f4b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
index f17b66d..c4871f2 100644
--- a/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
+++ b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
@@ -59,6 +59,7 @@ import org.apache.slider.common.params.ActionCreateArgs;
 import org.apache.slider.common.params.ActionFreezeArgs;
 import org.apache.slider.common.params.ActionThawArgs;
 import org.apache.slider.common.tools.SliderFileSystem;
+import org.apache.slider.core.exceptions.SliderException;
 import org.apache.slider.core.exceptions.UnknownApplicationInstanceException;
 import org.apache.slider.core.main.LauncherExitCodes;
 import org.apache.slider.providers.agent.application.metadata.Application;
@@ -119,22 +120,45 @@ public class SliderAppsViewControllerImpl implements SliderAppsViewController {
     }
     return null;
   }
-
+  
+  private static interface SliderClientContextRunnable<T> {
+    public T run(SliderClient sliderClient) throws YarnException, IOException, InterruptedException;
+  }
+  
+  private <T> T invokeSliderClientRunnable(final SliderClientContextRunnable<T> runnable) throws IOException, InterruptedException {
+    ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
+    Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
+    try {
+      T value = UserGroupInformation.getBestUGI(null, "yarn").doAs(
+          new PrivilegedExceptionAction<T>() {
+            @Override
+            public T run() throws Exception {
+              final SliderClient sliderClient = createSliderClient();
+              try{
+                return runnable.run(sliderClient);
+              }finally{
+                destroySliderClient(sliderClient);
+              }
+            }
+          });
+      return value;
+    } finally {
+      Thread.currentThread().setContextClassLoader(currentClassLoader);
+    }
+  }
+  
   @Override
-  public SliderApp getSliderApp(String applicationId, Set<String> properties)
-      throws YarnException, IOException {
-    ApplicationId appId = getApplicationId(applicationId);
+  public SliderApp getSliderApp(String applicationId, final Set<String> properties)
+      throws YarnException, IOException, InterruptedException {
+    final ApplicationId appId = getApplicationId(applicationId);
     if (appId != null) {
-      ClassLoader currentClassLoader = Thread.currentThread()
-          .getContextClassLoader();
-      Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
-      try {
-        SliderClient sliderClient = getSliderClient();
-        ApplicationReport yarnApp = sliderClient.getApplicationReport(appId);
-        return createSliderAppObject(yarnApp, properties, sliderClient);
-      } finally {
-        Thread.currentThread().setContextClassLoader(currentClassLoader);
-      }
+      return invokeSliderClientRunnable(new SliderClientContextRunnable<SliderApp>() {
+        @Override
+        public SliderApp run(SliderClient sliderClient) throws YarnException, IOException {
+          ApplicationReport yarnApp = sliderClient.getApplicationReport(appId);
+          return createSliderAppObject(yarnApp, properties, sliderClient);
+        }
+      });
     }
     return null;
   }
@@ -322,24 +346,29 @@ public class SliderAppsViewControllerImpl implements SliderAppsViewController {
 
   /**
    * Creates a new {@link SliderClient} initialized with appropriate
-   * configuration. If configuration was not determined, <code>null</code> is
-   * returned.
+   * configuration and started. This slider client can be used to invoke
+   * individual API methods.
    * 
-   * @return
+   * When work with this client is done,
+   * {@link #destroySliderClient(SliderClient)} must be called.
+   * 
+   * @return created {@link SliderClient}
+   * @see #destroySliderClient(SliderClient)
+   * @see #runSliderCommand(String...)
    */
-  protected SliderClient getSliderClient() {
+  protected SliderClient createSliderClient() {
     Configuration sliderClientConfiguration = getSliderClientConfiguration();
-    if (sliderClientConfiguration != null) {
-      SliderClient client = new SliderClient() {
-        @Override
-        public String getUsername() throws IOException {
-          return "yarn";
-        }
+    SliderClient client = new SliderClient() {
+      @Override
+      public String getUsername() throws IOException {
+        return "yarn";
+      }
 
-        @Override
-        protected void serviceInit(Configuration conf) throws Exception {
-          super.serviceInit(conf);
-          // Override the default FS client to set the super user.
+      @Override
+      protected void initHadoopBinding() throws IOException, SliderException {
+        super.initHadoopBinding();
+        // Override the default FS client to the calling user
+        try {
           FileSystem fs = FileSystem.get(FileSystem.getDefaultUri(getConfig()),
               getConfig(), "yarn");
           SliderFileSystem fileSystem = new SliderFileSystem(fs, getConfig());
@@ -347,20 +376,49 @@ public class SliderAppsViewControllerImpl implements SliderAppsViewController {
               .getDeclaredField("sliderFileSystem");
           fsField.setAccessible(true);
           fsField.set(this, fileSystem);
+        } catch (InterruptedException e) {
+          throw new SliderException("Slider view unable to override filesystem of Slider client", e);
+        } catch (NoSuchFieldException e) {
+          throw new SliderException("Slider view unable to override filesystem of Slider client", e);
+        } catch (SecurityException e) {
+          throw new SliderException("Slider view unable to override filesystem of Slider client", e);
+        } catch (IllegalArgumentException e) {
+          throw new SliderException("Slider view unable to override filesystem of Slider client", e);
+        } catch (IllegalAccessException e) {
+          throw new SliderException("Slider view unable to override filesystem of Slider client", e);
+        }
+      }
+
+      @Override
+      public void init(Configuration conf) {
+        super.init(conf);
+        try {
+          initHadoopBinding();
+        } catch (SliderException e) {
+          throw new RuntimeException("Unable to automatically init Hadoop binding", e);
+        } catch (IOException e) {
+          throw new RuntimeException("Unable to automatically init Hadoop binding", e);
         }
-      };
-      try {
-        sliderClientConfiguration = client.bindArgs(sliderClientConfiguration,
-            new String[] { "usage" });
-      } catch (Exception e) {
-        logger.warn("Unable to set SliderClient configs", e);
-        throw new RuntimeException(e.getMessage(), e);
       }
+    };
+    try {
+      sliderClientConfiguration = client.bindArgs(sliderClientConfiguration,
+          new String[] { "usage" });
       client.init(sliderClientConfiguration);
       client.start();
-      return client;
+    } catch (Exception e) {
+      logger.warn("Unable to create SliderClient", e);
+      throw new RuntimeException(e.getMessage(), e);
+    } catch (Throwable e) {
+      logger.warn("Unable to create SliderClient", e);
+      throw new RuntimeException(e.getMessage(), e);
     }
-    return null;
+    return client;
+  }
+
+  protected void destroySliderClient(SliderClient sliderClient) {
+    sliderClient.stop();
+    sliderClient = null;
   }
 
   /**
@@ -389,58 +447,54 @@ public class SliderAppsViewControllerImpl implements SliderAppsViewController {
   }
 
   @Override
-  public List<SliderApp> getSliderApps(Set<String> properties)
-      throws YarnException, IOException {
-    List<SliderApp> sliderApps = new ArrayList<SliderApp>();
-    ClassLoader currentClassLoader = Thread.currentThread()
-        .getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
-    try {
-      Map<String, SliderApp> sliderAppsMap = new HashMap<String, SliderApp>();
-      SliderClient sliderClient = getSliderClient();
-      List<ApplicationReport> yarnApps = sliderClient.listSliderInstances(null);
-      for (ApplicationReport yarnApp : yarnApps) {
-        SliderApp sliderAppObject = createSliderAppObject(yarnApp, properties,
-            sliderClient);
-        if (sliderAppObject != null) {
-          if (sliderAppsMap.containsKey(sliderAppObject.getName())) {
-            if (sliderAppsMap.get(sliderAppObject.getName()).getId()
-                .compareTo(sliderAppObject.getId()) < 0) {
+  public List<SliderApp> getSliderApps(final Set<String> properties)
+      throws YarnException, IOException, InterruptedException {
+    return invokeSliderClientRunnable(new SliderClientContextRunnable<List<SliderApp>>() {
+      @Override
+      public List<SliderApp> run(SliderClient sliderClient)
+          throws YarnException, IOException {
+        List<SliderApp> sliderApps = new ArrayList<SliderApp>();
+        Map<String, SliderApp> sliderAppsMap = new HashMap<String, SliderApp>();
+        List<ApplicationReport> yarnApps = sliderClient.listSliderInstances(null);
+        for (ApplicationReport yarnApp : yarnApps) {
+          SliderApp sliderAppObject = createSliderAppObject(yarnApp, properties,
+              sliderClient);
+          if (sliderAppObject != null) {
+            if (sliderAppsMap.containsKey(sliderAppObject.getName())) {
+              if (sliderAppsMap.get(sliderAppObject.getName()).getId()
+                  .compareTo(sliderAppObject.getId()) < 0) {
+                sliderAppsMap.put(sliderAppObject.getName(), sliderAppObject);
+              }
+            } else {
               sliderAppsMap.put(sliderAppObject.getName(), sliderAppObject);
             }
-          } else {
-            sliderAppsMap.put(sliderAppObject.getName(), sliderAppObject);
           }
         }
+        if (sliderAppsMap.size() > 0)
+          sliderApps.addAll(sliderAppsMap.values());
+        return sliderApps;
       }
-      if (sliderAppsMap.size() > 0)
-        sliderApps.addAll(sliderAppsMap.values());
-    } finally {
-      Thread.currentThread().setContextClassLoader(currentClassLoader);
-    }
-    return sliderApps;
+    });
   }
 
   @Override
-  public void deleteSliderApp(String applicationId) throws YarnException,
-      IOException {
-    ClassLoader currentClassLoader = Thread.currentThread()
-        .getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
-    try {
-      Set<String> properties = new HashSet<String>();
-      properties.add("id");
-      properties.add("name");
-      SliderApp sliderApp = getSliderApp(applicationId, properties);
-      if (sliderApp == null) {
-        throw new ApplicationNotFoundException(applicationId);
+  public void deleteSliderApp(final String applicationId) throws YarnException,
+      IOException, InterruptedException {
+    Integer code = invokeSliderClientRunnable(new SliderClientContextRunnable<Integer>() {
+      @Override
+      public Integer run(SliderClient sliderClient) throws YarnException,
+          IOException, InterruptedException {
+        Set<String> properties = new HashSet<String>();
+        properties.add("id");
+        properties.add("name");
+        SliderApp sliderApp = getSliderApp(applicationId, properties);
+        if (sliderApp == null) {
+          throw new ApplicationNotFoundException(applicationId);
+        }
+        return sliderClient.actionDestroy(sliderApp.getName());
       }
-
-      SliderClient sliderClient = getSliderClient();
-      sliderClient.actionDestroy(sliderApp.getName());
-    } finally {
-      Thread.currentThread().setContextClassLoader(currentClassLoader);
-    }
+    });
+    logger.info("Deleted Slider App [" + applicationId + "] with exit code " + code);
   }
 
   @Override
@@ -623,25 +677,18 @@ public class SliderAppsViewControllerImpl implements SliderAppsViewController {
       createArgs.resources = resourcesJsonFile;
       createArgs.image = new Path(hdfsLocation
           + "/user/yarn/agent/slider-agent.tar.gz");
-
-      ClassLoader currentClassLoader = Thread.currentThread()
-          .getContextClassLoader();
-      Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
-      try {
-        ApplicationId applicationId = UserGroupInformation.getBestUGI(null,
-            "yarn").doAs(new PrivilegedExceptionAction<ApplicationId>() {
-          public ApplicationId run() throws IOException, YarnException {
-            SliderClient sliderClient = getSliderClient();
-            sliderClient.actionCreate(appName, createArgs);
-            return sliderClient.applicationId;
+      
+      return invokeSliderClientRunnable(new SliderClientContextRunnable<String>() {
+        @Override
+        public String run(SliderClient sliderClient) throws YarnException, IOException, InterruptedException {
+          sliderClient.actionCreate(appName, createArgs);
+          ApplicationId applicationId = sliderClient.applicationId;
+          if (applicationId != null) {
+            return getApplicationIdString(applicationId);
           }
-        });
-        if (applicationId != null) {
-          return getApplicationIdString(applicationId);
+          return null;
         }
-      } finally {
-        Thread.currentThread().setContextClassLoader(currentClassLoader);
-      }
+      });
     }
     return null;
   }
@@ -711,59 +758,42 @@ public class SliderAppsViewControllerImpl implements SliderAppsViewController {
   }
 
   @Override
-  public void freezeApp(String appId) throws YarnException, IOException,
+  public void freezeApp(final String appId) throws YarnException, IOException,
       InterruptedException {
-    ClassLoader currentClassLoader = Thread.currentThread()
-        .getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
-    try {
-      Set<String> properties = new HashSet<String>();
-      properties.add("id");
-      properties.add("name");
-      final SliderApp sliderApp = getSliderApp(appId, properties);
-      if (sliderApp == null)
-        throw new ApplicationNotFoundException(appId);
-
-      ApplicationId applicationId = UserGroupInformation.getBestUGI(null,
-          "yarn").doAs(new PrivilegedExceptionAction<ApplicationId>() {
-        public ApplicationId run() throws IOException, YarnException {
-          SliderClient sliderClient = getSliderClient();
-          ActionFreezeArgs freezeArgs = new ActionFreezeArgs();
-          sliderClient.actionFreeze(sliderApp.getName(), freezeArgs);
-          return sliderClient.applicationId;
-        }
-      });
-      logger.debug("Slider app has been frozen - " + applicationId.toString());
-    } finally {
-      Thread.currentThread().setContextClassLoader(currentClassLoader);
-    }
+    ApplicationId applicationId = invokeSliderClientRunnable(new SliderClientContextRunnable<ApplicationId>() {
+      @Override
+      public ApplicationId run(SliderClient sliderClient) throws YarnException, IOException, InterruptedException {
+        Set<String> properties = new HashSet<String>();
+        properties.add("id");
+        properties.add("name");
+        final SliderApp sliderApp = getSliderApp(appId, properties);
+        if (sliderApp == null)
+          throw new ApplicationNotFoundException(appId);
+        ActionFreezeArgs freezeArgs = new ActionFreezeArgs();
+        sliderClient.actionFreeze(sliderApp.getName(), freezeArgs);
+        return sliderClient.applicationId;
+      }
+    });
+    logger.info("Frozen Slider App [" + appId + "] with response: " + applicationId.toString());
   }
 
   @Override
-  public void thawApp(String appId) throws YarnException, IOException,
-      InterruptedException {
-    ClassLoader currentClassLoader = Thread.currentThread()
-        .getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
-    try {
-      Set<String> properties = new HashSet<String>();
-      properties.add("id");
-      properties.add("name");
-      final SliderApp sliderApp = getSliderApp(appId, properties);
-      if (sliderApp == null)
-        throw new ApplicationNotFoundException(appId);
-      ApplicationId applicationId = UserGroupInformation.getBestUGI(null,
-          "yarn").doAs(new PrivilegedExceptionAction<ApplicationId>() {
-        public ApplicationId run() throws IOException, YarnException {
-          SliderClient sliderClient = getSliderClient();
-          ActionThawArgs thawArgs = new ActionThawArgs();
-          sliderClient.actionThaw(sliderApp.getName(), thawArgs);
-          return sliderClient.applicationId;
-        }
-      });
-      logger.debug("Slider app has been thawed - " + applicationId.toString());
-    } finally {
-      Thread.currentThread().setContextClassLoader(currentClassLoader);
-    }
+  public void thawApp(final String appId) throws YarnException, IOException, InterruptedException {
+    ApplicationId applicationId = invokeSliderClientRunnable(new SliderClientContextRunnable<ApplicationId>() {
+      @Override
+      public ApplicationId run(SliderClient sliderClient) throws YarnException,
+          IOException, InterruptedException {
+        Set<String> properties = new HashSet<String>();
+        properties.add("id");
+        properties.add("name");
+        final SliderApp sliderApp = getSliderApp(appId, properties);
+        if (sliderApp == null)
+          throw new ApplicationNotFoundException(appId);
+        ActionThawArgs thawArgs = new ActionThawArgs();
+        sliderClient.actionThaw(sliderApp.getName(), thawArgs);
+        return sliderClient.applicationId;
+      }
+    });
+    logger.info("Thawed Slider App [" + appId + "] with response: " + applicationId.toString());
   }
 }

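The refactor above funnels every Slider client call through
invokeSliderClientRunnable, which swaps the thread context classloader, runs
the action as the "yarn" proxy user, and guarantees client teardown. Its core
shape, as a method-level sketch (runAsYarn is an illustrative name, and
client creation/teardown are elided into the action):

    import java.io.IOException;
    import java.security.PrivilegedExceptionAction;
    import java.util.concurrent.Callable;
    import org.apache.hadoop.security.UserGroupInformation;

    <T> T runAsYarn(final Callable<T> action) throws IOException, InterruptedException {
      ClassLoader previous = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
      try {
        // doAs() surfaces IOException and InterruptedException directly.
        return UserGroupInformation.getBestUGI(null, "yarn").doAs(
            new PrivilegedExceptionAction<T>() {
              @Override
              public T run() throws Exception {
                return action.call();  // create/use/destroy the client here
              }
            });
      } finally {
        Thread.currentThread().setContextClassLoader(previous);
      }
    }
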
http://git-wip-us.apache.org/repos/asf/ambari/blob/26cd2f4b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/rest/SliderAppsResource.java
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/rest/SliderAppsResource.java b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/rest/SliderAppsResource.java
index 4459db2..68f363b 100644
--- a/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/rest/SliderAppsResource.java
+++ b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/rest/SliderAppsResource.java
@@ -72,7 +72,7 @@ public class SliderAppsResource {
   @DELETE
   @Path("{appId}")
   public void deleteApp(@Context HttpHeaders headers, @Context UriInfo uri,
-      @PathParam("appId") String appId) throws YarnException, IOException {
+      @PathParam("appId") String appId) throws YarnException, IOException, InterruptedException {
     sliderAppsViewController.deleteSliderApp(appId);
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/26cd2f4b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/rest/client/Metric.java
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/rest/client/Metric.java b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/rest/client/Metric.java
index e92d174..4bbeb2f 100644
--- a/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/rest/client/Metric.java
+++ b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/rest/client/Metric.java
@@ -19,11 +19,13 @@
 package org.apache.ambari.view.slider.rest.client;
 
 import org.apache.log4j.Logger;
+import org.codehaus.jackson.annotate.JsonIgnore;
 
 import javax.xml.xpath.XPath;
 import javax.xml.xpath.XPathExpression;
 import javax.xml.xpath.XPathExpressionException;
 import javax.xml.xpath.XPathFactory;
+
 import java.util.ArrayList;
 import java.util.List;
 
@@ -73,6 +75,7 @@ public class Metric {
     this.temporal = temporal;
   }
 
+  @JsonIgnore
   public XPathExpression getxPathExpression() {
     if (!xPathExpressionComputed) {
       XPathFactory xPathfactory = XPathFactory.newInstance();

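The new @JsonIgnore matters because Jackson serializes public getters by
default, so without it the lazily compiled (and non-serializable)
XPathExpression would be pulled into Metric's JSON. A minimal sketch of the
effect, using the codehaus Jackson 1.x API imported above (class and values
are illustrative):

    import org.codehaus.jackson.annotate.JsonIgnore;
    import org.codehaus.jackson.map.ObjectMapper;

    public class MetricLike {
      public String getKeyName() { return "metrics/jvm/HeapMemoryUsed"; }

      @JsonIgnore  // derived value; excluded from the JSON output
      public int getKeyDepth() { return getKeyName().split("/").length; }

      public static void main(String[] args) throws Exception {
        // Prints {"keyName":"metrics/jvm/HeapMemoryUsed"} only.
        System.out.println(new ObjectMapper().writeValueAsString(new MetricLike()));
      }
    }
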

[20/27] git commit: AMBARI-7296. HCatalog and WebHCat services should not be managed as separate service (should be part of Hive service) (jaimin)

Posted by jo...@apache.org.
AMBARI-7296. HCatalog and WebHCat services should not be managed as separate service (should be part of Hive service) (jaimin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/601014ed
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/601014ed
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/601014ed

Branch: refs/heads/branch-alerts-dev
Commit: 601014ed8b047c676ece41016a3278c19703794f
Parents: 610bb1e
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Mon Sep 15 12:12:17 2014 -0700
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Mon Sep 15 12:12:46 2014 -0700

----------------------------------------------------------------------
 .../ambari_agent/TestActualConfigHandler.py     |   6 +-
 .../test/python/ambari_agent/TestLiveStatus.py  |   4 +-
 ambari-server/docs/api/v1/services.md           |   7 -
 .../internal/BaseBlueprintProcessor.java        |   2 +-
 .../ambari/server/metadata/ActionMetadata.java  |   1 -
 .../org/apache/ambari/server/state/Service.java |   8 +-
 .../server/upgrade/UpgradeCatalog170.java       | 190 +++++++++----
 .../custom_actions/validate_configs.py          |   2 -
 .../stacks/HDP/1.3.2/role_command_order.json    |   4 +-
 .../services/HIVE/configuration/webhcat-env.xml |  54 ++++
 .../HIVE/configuration/webhcat-site.xml         | 156 +++++++++++
 .../stacks/HDP/1.3.2/services/HIVE/metainfo.xml | 154 ++++++-----
 .../HIVE/package/files/templetonSmoke.sh        |  96 +++++++
 .../services/HIVE/package/scripts/params.py     |  31 ++-
 .../HIVE/package/scripts/service_check.py       |   2 +
 .../HIVE/package/scripts/status_params.py       |   3 +
 .../services/HIVE/package/scripts/webhcat.py    | 107 ++++++++
 .../HIVE/package/scripts/webhcat_server.py      |  53 ++++
 .../HIVE/package/scripts/webhcat_service.py     |  40 +++
 .../package/scripts/webhcat_service_check.py    |  42 +++
 .../HDP/1.3.2/services/NAGIOS/metainfo.xml      |   2 +-
 .../templates/hadoop-servicegroups.cfg.j2       |   6 -
 .../package/templates/hadoop-services.cfg.j2    |   2 +-
 .../WEBHCAT/configuration/webhcat-env.xml       |  54 ----
 .../WEBHCAT/configuration/webhcat-site.xml      | 156 -----------
 .../HDP/1.3.2/services/WEBHCAT/metainfo.xml     | 103 -------
 .../WEBHCAT/package/files/templetonSmoke.sh     |  96 -------
 .../WEBHCAT/package/scripts/__init__.py         |  20 --
 .../services/WEBHCAT/package/scripts/params.py  |  78 ------
 .../WEBHCAT/package/scripts/service_check.py    |  44 ---
 .../WEBHCAT/package/scripts/status_params.py    |  26 --
 .../services/WEBHCAT/package/scripts/webhcat.py | 107 --------
 .../WEBHCAT/package/scripts/webhcat_server.py   |  53 ----
 .../WEBHCAT/package/scripts/webhcat_service.py  |  40 ---
 .../stacks/HDP/1.3/role_command_order.json      |   4 +-
 .../stacks/HDP/1.3/services/HIVE/metainfo.xml   |   5 -
 .../stacks/HDP/2.0.6/role_command_order.json    |   4 +-
 .../services/HIVE/configuration/webhcat-env.xml |  54 ++++
 .../HIVE/configuration/webhcat-site.xml         | 138 ++++++++++
 .../stacks/HDP/2.0.6/services/HIVE/metainfo.xml | 165 ++++++-----
 .../HIVE/package/files/templetonSmoke.sh        |  96 +++++++
 .../services/HIVE/package/scripts/params.py     |  44 ++-
 .../HIVE/package/scripts/service_check.py       |   2 +
 .../HIVE/package/scripts/status_params.py       |   1 +
 .../services/HIVE/package/scripts/webhcat.py    | 112 ++++++++
 .../HIVE/package/scripts/webhcat_server.py      |  53 ++++
 .../HIVE/package/scripts/webhcat_service.py     |  40 +++
 .../package/scripts/webhcat_service_check.py    |  41 +++
 .../HDP/2.0.6/services/NAGIOS/metainfo.xml      |   2 +-
 .../templates/hadoop-servicegroups.cfg.j2       |   8 +-
 .../package/templates/hadoop-services.cfg.j2    |   2 +-
 .../WEBHCAT/configuration/webhcat-env.xml       |  54 ----
 .../WEBHCAT/configuration/webhcat-site.xml      | 138 ----------
 .../HDP/2.0.6/services/WEBHCAT/metainfo.xml     | 110 --------
 .../WEBHCAT/package/files/templetonSmoke.sh     |  96 -------
 .../WEBHCAT/package/scripts/__init__.py         |  20 --
 .../services/WEBHCAT/package/scripts/params.py  | 102 -------
 .../WEBHCAT/package/scripts/service_check.py    |  45 ---
 .../WEBHCAT/package/scripts/status_params.py    |  26 --
 .../services/WEBHCAT/package/scripts/webhcat.py | 112 --------
 .../WEBHCAT/package/scripts/webhcat_server.py   |  53 ----
 .../WEBHCAT/package/scripts/webhcat_service.py  |  40 ---
 .../HDP/2.1.GlusterFS/role_command_order.json   |   4 +-
 .../2.1.GlusterFS/services/HIVE/metainfo.xml    | 100 ++++---
 .../WEBHCAT/configuration/webhcat-site.xml      | 143 ----------
 .../2.1.GlusterFS/services/WEBHCAT/metainfo.xml |  46 ----
 .../stacks/HDP/2.1/role_command_order.json      |   3 +-
 .../stacks/HDP/2.1/services/HIVE/metainfo.xml   |  62 ++++-
 .../WEBHCAT/configuration/webhcat-site.xml      | 143 ----------
 .../HDP/2.1/services/WEBHCAT/metainfo.xml       |  47 ----
 .../stacks/HDP/2.2.1/services/HIVE/metainfo.xml |   4 -
 .../HDP/2.2.1/services/WEBHCAT/metainfo.xml     |  26 --
 .../HIVE/configuration/webhcat-site.xml         |  59 ++++
 .../stacks/HDP/2.2/services/HIVE/metainfo.xml   |  29 +-
 .../WEBHCAT/configuration/webhcat-site.xml      |  59 ----
 .../api/util/StackExtensionHelperTest.java      |  29 +-
 .../AmbariManagementControllerTest.java         | 123 ++-------
 .../internal/BaseBlueprintProcessorTest.java    |  27 +-
 .../server/upgrade/UpgradeCatalog170Test.java   | 241 +++++++++++-----
 .../1.3.2/HIVE/test_hive_service_check.py       |  20 ++
 .../stacks/1.3.2/HIVE/test_webhcat_server.py    | 258 ++++++++++++++++++
 .../stacks/1.3.2/WEBHCAT/test_webhcat_server.py | 258 ------------------
 .../1.3.2/WEBHCAT/test_webhcat_service_check.py |  61 -----
 .../2.0.6/HIVE/test_hive_service_check.py       |  21 ++
 .../stacks/2.0.6/HIVE/test_webhcat_server.py    | 270 ++++++++++++++++++
 .../stacks/2.0.6/WEBHCAT/test_webhcat_server.py | 273 -------------------
 .../2.0.6/WEBHCAT/test_webhcat_service_check.py |  61 -----
 .../stacks/HDP/2.0.5/services/HIVE/metainfo.xml |  74 +++++
 .../WEBHCAT/configuration/webhcat-site.xml      | 126 ---------
 .../HDP/2.0.5/services/WEBHCAT/metainfo.xml     | 102 -------
 .../HDP/2.0.6/services/WEBHCAT/metainfo.xml     |  28 --
 ambari-web/app/assets/data/alerts/alerts.json   |   2 +-
 .../app/assets/data/dashboard/services.json     | 117 --------
 .../app/assets/data/hosts/HDP2/hosts.json       |  11 +-
 .../data/services/HDP2/components_state.json    |   9 -
 .../app/assets/data/services/HDP2/services.json |  16 --
 .../services/host_component_actual_configs.json | 118 +-------
 .../data/stacks/HDP-2.1/recommendations.json    |   2 +-
 .../stacks/HDP-2.1/recommendations_configs.json |   2 +-
 .../data/stacks/HDP-2.1/service_components.json | 218 ++++++---------
 .../data/wizard/stack/hdp/version/1.2.0.json    |  18 --
 .../data/wizard/stack/hdp/version/1.2.1.json    |  11 -
 .../data/wizard/stack/hdp/version/1.3.0.json    |  11 -
 .../data/wizard/stack/hdp/version/1.3.1.json    |  11 -
 .../data/wizard/stack/hdp/version/2.0.1.json    |  11 -
 .../data/wizard/stack/hdp/version/2.0.5.json    |  11 -
 .../data/wizard/stack/hdp/version0.1.json       |  17 --
 .../wizard/stack/hdp/version01/HCATALOG.json    |  20 --
 .../wizard/stack/hdp/version1.2.1/HCATALOG.json |   4 -
 .../wizard/stack/hdp/version1.3.0/HCATALOG.json |   4 -
 .../wizard/stack/hdp/version131/HCATALOG.json   |   4 -
 .../wizard/stack/hdp/version2.0.1/HCATALOG.json |   4 -
 .../app/assets/data/wizard/stack/stacks.json    |  55 ----
 .../main/admin/security/add/step2.js            |   4 +-
 .../main/admin/serviceAccounts_controller.js    |   2 +-
 .../controllers/main/service/info/configs.js    |   8 +-
 ambari-web/app/controllers/main/service/item.js |   2 +-
 .../app/controllers/wizard/step7_controller.js  |   3 +-
 .../app/controllers/wizard/step8_controller.js  |   3 +-
 ambari-web/app/data/HDP2/secure_configs.js      |  15 +-
 ambari-web/app/data/HDP2/secure_mapping.js      |  12 +-
 ambari-web/app/data/HDP2/secure_properties.js   |   8 +-
 ambari-web/app/data/HDP2/site_properties.js     |  22 +-
 ambari-web/app/data/secure_configs.js           |  15 +-
 ambari-web/app/data/secure_mapping.js           |  12 +-
 ambari-web/app/data/secure_properties.js        |   8 +-
 ambari-web/app/data/site_properties.js          |  22 +-
 .../app/mixins/wizard/addSecurityConfigs.js     |   2 +-
 ambari-web/app/models/service.js                |  12 -
 ambari-web/app/models/stack_service.js          |  15 +-
 .../app/utils/batch_scheduled_requests.js       |   1 -
 ambari-web/app/utils/config.js                  |   2 +-
 ambari-web/app/utils/helper.js                  |   1 -
 .../app/views/main/host/configs_service_menu.js |   2 +-
 .../app/views/main/service/info/summary.js      |   2 +-
 ambari-web/app/views/main/service/item.js       |   2 +-
 ambari-web/app/views/main/service/menu.js       |   8 +-
 .../security/add/addSecurity_controller_test.js |   2 -
 .../main/service/info/config_test.js            |   8 +-
 .../test/controllers/wizard/step4_test.js       |  34 +--
 .../test/controllers/wizard/step8_test.js       |  14 +-
 ambari-web/test/models/service_test.js          |   8 -
 ambari-web/test/models/stack_service_test.js    |   8 -
 ambari-web/test/service_components.js           | 229 ++++++----------
 ambari-web/test/utils/helper_test.js            |   1 -
 .../src/addOns/nagios/scripts/nagios_alerts.php |   8 +-
 .../apache/ambari/msi/ClusterDefinition.java    |   2 +-
 .../ambari/msi/ClusterDefinitionTest.java       |   4 -
 .../Scripts/HostComponentsDiscovery.ps1         |   3 +-
 .../assets/data/resource/service_status.json    |  12 -
 150 files changed, 2786 insertions(+), 4419 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py b/ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py
index ca56350..07d0171 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py
@@ -34,8 +34,8 @@ class TestActualConfigHandler(TestCase):
   def setUp(self):
     LiveStatus.SERVICES = [
       "HDFS", "MAPREDUCE", "GANGLIA", "HBASE",
-      "NAGIOS", "ZOOKEEPER", "OOZIE", "HCATALOG",
-      "KERBEROS", "TEMPLETON", "HIVE", "WEBHCAT",
+      "NAGIOS", "ZOOKEEPER", "OOZIE",
+      "KERBEROS", "TEMPLETON", "HIVE",
       "YARN", "MAPREDUCE2", "FLUME", "TEZ",
       "FALCON", "STORM"
     ]
@@ -108,7 +108,7 @@ class TestActualConfigHandler(TestCase):
        "componentName" : "HIVE_METASTORE"},
       {"serviceName" : "HIVE",
        "componentName" : "MYSQL_SERVER"},
-      {"serviceName" : "WEBHCAT",
+      {"serviceName" : "HIVE",
        "componentName" : "WEBHCAT_SERVER"},
       {"serviceName" : "YARN",
        "componentName" : "RESOURCEMANAGER"},

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py b/ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py
index 6937da2..9dfb47a 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py
@@ -36,8 +36,8 @@ class TestLiveStatus(TestCase):
     sys.stdout = out
     LiveStatus.SERVICES = [
       "HDFS", "MAPREDUCE", "GANGLIA", "HBASE",
-      "NAGIOS", "ZOOKEEPER", "OOZIE", "HCATALOG",
-      "KERBEROS", "TEMPLETON", "HIVE", "WEBHCAT",
+      "NAGIOS", "ZOOKEEPER", "OOZIE",
+      "KERBEROS", "TEMPLETON", "HIVE",
       "YARN", "MAPREDUCE2", "FLUME", "TEZ",
       "FALCON", "STORM"
     ]

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/docs/api/v1/services.md
----------------------------------------------------------------------
diff --git a/ambari-server/docs/api/v1/services.md b/ambari-server/docs/api/v1/services.md
index 8a9b68d..de5c203 100644
--- a/ambari-server/docs/api/v1/services.md
+++ b/ambari-server/docs/api/v1/services.md
@@ -78,13 +78,6 @@ Get the collection of the services for the cluster named "c1".
           		}
         	},
         	{
-        		"href" : "http://your.ambari.server/api/v1/clusters/c1/services/HCATALOG",
-        		"ServiceInfo" : {
-        	  		"cluster_name" : "c1",
-        	  		"service_name" : "HCATALOG"
-        	  	}
-        	},
-        	{
         		"href" : "http://your.ambari.server/api/v1/clusters/c1/services/PIG",
         		"ServiceInfo" : {
         	  		"cluster_name" : "c1",

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java
index 5a99af8..abd22a4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessor.java
@@ -851,7 +851,7 @@ public abstract class BaseBlueprintProcessor extends AbstractControllerResourceP
       Collection<DependencyInfo> nagiosDependencies = getDependenciesForComponent("NAGIOS_SERVER");
       for (DependencyInfo dependency : nagiosDependencies) {
         if (dependency.getComponentName().equals("HCAT")) {
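+          // HCAT is part of the HIVE service now, so report this dependency under HIVE.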
-          dependencyConditionalServiceMap.put(dependency, "HCATALOG");
+          dependencyConditionalServiceMap.put(dependency, "HIVE");
         } else if (dependency.getComponentName().equals("OOZIE_CLIENT")) {
           dependencyConditionalServiceMap.put(dependency, "OOZIE");
         } else if (dependency.getComponentName().equals("YARN_CLIENT")) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java b/ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java
index ae13bf3..8a9799d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java
@@ -42,7 +42,6 @@ public class ActionMetadata {
       Map<String, String> serviceChecks = new HashMap<String, String>();
       
       serviceChecks.put(Service.Type.ZOOKEEPER.toString(), "ZOOKEEPER_QUORUM_SERVICE_CHECK");
-      serviceChecks.put(Service.Type.HCATALOG.toString(), "HCAT_SERVICE_CHECK");
       
       SERVICE_CHECKS = Collections.unmodifiableMap(serviceChecks);
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/java/org/apache/ambari/server/state/Service.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/Service.java b/ambari-server/src/main/java/org/apache/ambari/server/state/Service.java
index 48fd77e..63d8760 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/Service.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/Service.java
@@ -18,14 +18,13 @@
 
 package org.apache.ambari.server.state;
 
-import java.util.Map;
-import java.util.concurrent.locks.ReadWriteLock;
-
 import com.google.inject.persist.Transactional;
-
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.controller.ServiceResponse;
 
+import java.util.Map;
+import java.util.concurrent.locks.ReadWriteLock;
+
 public interface Service {
 
   public String getName();
@@ -113,7 +112,6 @@ public interface Service {
     GANGLIA,
     ZOOKEEPER,
     PIG,
-    HCATALOG,
     FLUME,
     YARN,
     MAPREDUCE2

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
index 4a9b83f..75635cc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
@@ -18,74 +18,30 @@
 
 package org.apache.ambari.server.upgrade;
 
-import java.lang.reflect.Type;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.Date;
-
-import javax.persistence.EntityManager;
-import javax.persistence.TypedQuery;
-import javax.persistence.criteria.CriteriaBuilder;
-import javax.persistence.criteria.CriteriaQuery;
-import javax.persistence.criteria.Expression;
-import javax.persistence.criteria.Predicate;
-import javax.persistence.criteria.Root;
-
 import com.google.common.reflect.TypeToken;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
-import org.apache.ambari.server.orm.dao.ClusterDAO;
-import org.apache.ambari.server.orm.dao.DaoUtils;
-import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
-import org.apache.ambari.server.orm.dao.KeyValueDAO;
-import org.apache.ambari.server.orm.dao.PermissionDAO;
-import org.apache.ambari.server.orm.dao.PrincipalDAO;
-import org.apache.ambari.server.orm.dao.PrincipalTypeDAO;
-import org.apache.ambari.server.orm.dao.PrivilegeDAO;
-import org.apache.ambari.server.orm.dao.ResourceDAO;
-import org.apache.ambari.server.orm.dao.ResourceTypeDAO;
-import org.apache.ambari.server.orm.dao.ConfigGroupConfigMappingDAO;
-import org.apache.ambari.server.orm.dao.UserDAO;
-import org.apache.ambari.server.orm.dao.ViewDAO;
-import org.apache.ambari.server.orm.dao.ViewInstanceDAO;
-import org.apache.ambari.server.orm.entities.ClusterEntity;
-import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
-import org.apache.ambari.server.orm.entities.ConfigGroupConfigMappingEntity;
-import org.apache.ambari.server.orm.entities.ClusterConfigEntity;
-import org.apache.ambari.server.orm.entities.HostRoleCommandEntity_;
-import org.apache.ambari.server.orm.entities.KeyValueEntity;
-import org.apache.ambari.server.orm.entities.PermissionEntity;
-import org.apache.ambari.server.orm.entities.PrincipalEntity;
-import org.apache.ambari.server.orm.entities.PrincipalTypeEntity;
-import org.apache.ambari.server.orm.entities.PrivilegeEntity;
-import org.apache.ambari.server.orm.entities.ResourceEntity;
-import org.apache.ambari.server.orm.entities.ResourceTypeEntity;
-import org.apache.ambari.server.orm.entities.UserEntity;
-import org.apache.ambari.server.orm.entities.ViewEntity;
-import org.apache.ambari.server.orm.entities.ViewInstanceEntity;
-import org.apache.ambari.server.state.Cluster;
-import org.apache.ambari.server.state.Clusters;
-import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigHelper;
+import org.apache.ambari.server.orm.dao.*;
+import org.apache.ambari.server.orm.entities.*;
+import org.apache.ambari.server.state.*;
 import org.apache.ambari.server.utils.StageUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.inject.Inject;
-import com.google.inject.Injector;
+import javax.persistence.EntityManager;
+import javax.persistence.TypedQuery;
+import javax.persistence.criteria.*;
+import java.lang.reflect.Type;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.*;
+import java.util.Map.Entry;
 
 /**
  * Upgrade catalog for version 1.7.0.
@@ -532,6 +488,9 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
   @Override
   protected void executeDMLUpdates() throws AmbariException, SQLException {
     // Update historic records with the log paths, but only enough so as to not prolong the upgrade process
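+    // Fold the defunct HCATALOG and WEBHCAT services into the HIVE service before other DML updates run.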
+    moveHcatalogIntoHiveService();
+    moveWebHcatIntoHiveService();
+
     executeInTransaction(new Runnable() {
       @Override
       public void run() {
@@ -597,6 +556,117 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
     moveConfigGroupsGlobalToEnv();
   }
 
+  public void moveHcatalogIntoHiveService() throws AmbariException {
+    final String serviceName = "HIVE";
+    final String serviceNameToBeDeleted = "HCATALOG";
+    final String componentName = "HCAT";
+    moveComponentsIntoService(serviceName, serviceNameToBeDeleted, componentName);
+  }
+
+  private void moveWebHcatIntoHiveService() throws AmbariException {
+    final String serviceName = "HIVE";
+    final String serviceNameToBeDeleted = "WEBHCAT";
+    final String componentName = "WEBHCAT_SERVER";
+    moveComponentsIntoService(serviceName, serviceNameToBeDeleted, componentName);
+  }
+
+  private void moveComponentsIntoService(String serviceName, String serviceNameToBeDeleted, String componentName) throws AmbariException {
+    /**
+     * 1. Add a servicecomponentdesiredstate entry for the component under HIVE.
+     * 2. Update hostcomponentdesiredstate: set service_name to HIVE where it is the old service.
+     * 3. Update hostcomponentstate: set service_name to HIVE where it is the old service.
+     * 4. Delete the servicecomponentdesiredstate entry for the component under the old service.
+     * 5. Delete the servicedesiredstate entry for the old service.
+     * 6. Delete the clusterservices entry for the old service.
+     */
+    ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
+    ClusterServiceDAO clusterServiceDAO = injector.getInstance(ClusterServiceDAO.class);
+    ServiceDesiredStateDAO serviceDesiredStateDAO = injector.getInstance(ServiceDesiredStateDAO.class);
+    ServiceComponentDesiredStateDAO serviceComponentDesiredStateDAO = injector.getInstance(ServiceComponentDesiredStateDAO.class);
+    HostComponentDesiredStateDAO hostComponentDesiredStateDAO = injector.getInstance(HostComponentDesiredStateDAO.class);
+    HostComponentStateDAO hostComponentStateDAO = injector.getInstance(HostComponentStateDAO.class);
+
+    List<ClusterEntity> clusterEntities = clusterDAO.findAll();
+    for (final ClusterEntity clusterEntity : clusterEntities) {
+      ServiceComponentDesiredStateEntityPK pkHCATInHcatalog = new ServiceComponentDesiredStateEntityPK();
+      pkHCATInHcatalog.setComponentName(componentName);
+      pkHCATInHcatalog.setClusterId(clusterEntity.getClusterId());
+      pkHCATInHcatalog.setServiceName(serviceNameToBeDeleted);
+      ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntityToDelete = serviceComponentDesiredStateDAO.findByPK(pkHCATInHcatalog);
+
+      if (serviceComponentDesiredStateEntityToDelete == null) {
+        continue;
+      }
+
+      ServiceDesiredStateEntityPK serviceDesiredStateEntityPK = new ServiceDesiredStateEntityPK();
+      serviceDesiredStateEntityPK.setClusterId(clusterEntity.getClusterId());
+      serviceDesiredStateEntityPK.setServiceName(serviceNameToBeDeleted);
+      ServiceDesiredStateEntity serviceDesiredStateEntity = serviceDesiredStateDAO.findByPK(serviceDesiredStateEntityPK);
+
+      ClusterServiceEntityPK clusterServiceEntityToBeDeletedPK = new ClusterServiceEntityPK();
+      clusterServiceEntityToBeDeletedPK.setClusterId(clusterEntity.getClusterId());
+      clusterServiceEntityToBeDeletedPK.setServiceName(serviceNameToBeDeleted);
+      ClusterServiceEntity clusterServiceEntityToBeDeleted = clusterServiceDAO.findByPK(clusterServiceEntityToBeDeletedPK);
+
+      ClusterServiceEntityPK clusterServiceEntityPK = new ClusterServiceEntityPK();
+      clusterServiceEntityPK.setClusterId(clusterEntity.getClusterId());
+      clusterServiceEntityPK.setServiceName(serviceName);
+
+      ClusterServiceEntity clusterServiceEntity = clusterServiceDAO.findByPK(clusterServiceEntityPK);
+
+      ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntity = new ServiceComponentDesiredStateEntity();
+      serviceComponentDesiredStateEntity.setServiceName(serviceName);
+      serviceComponentDesiredStateEntity.setComponentName(serviceComponentDesiredStateEntityToDelete.getComponentName());
+      serviceComponentDesiredStateEntity.setClusterId(clusterEntity.getClusterId());
+      serviceComponentDesiredStateEntity.setDesiredStackVersion(serviceComponentDesiredStateEntityToDelete.getDesiredStackVersion());
+      serviceComponentDesiredStateEntity.setDesiredState(serviceComponentDesiredStateEntityToDelete.getDesiredState());
+      serviceComponentDesiredStateEntity.setClusterServiceEntity(clusterServiceEntity);
+      // Not persisted yet: the entity is merged below, once its host components have been moved over.
+
+      Iterator<HostComponentDesiredStateEntity> hostComponentDesiredStateIterator = serviceComponentDesiredStateEntityToDelete.getHostComponentDesiredStateEntities().iterator();
+      Iterator<HostComponentStateEntity> hostComponentStateIterator = serviceComponentDesiredStateEntityToDelete.getHostComponentStateEntities().iterator();
+
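+      // Re-home each host-level desired-state row under HIVE, then drop the old row.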
+      while (hostComponentDesiredStateIterator.hasNext()) {
+        HostComponentDesiredStateEntity hcDesiredStateEntityToBeDeleted = hostComponentDesiredStateIterator.next();
+        HostComponentDesiredStateEntity hostComponentDesiredStateEntity = new HostComponentDesiredStateEntity();
+        hostComponentDesiredStateEntity.setClusterId(clusterEntity.getClusterId());
+        hostComponentDesiredStateEntity.setComponentName(hcDesiredStateEntityToBeDeleted.getComponentName());
+        hostComponentDesiredStateEntity.setDesiredStackVersion(hcDesiredStateEntityToBeDeleted.getDesiredStackVersion());
+        hostComponentDesiredStateEntity.setDesiredState(hcDesiredStateEntityToBeDeleted.getDesiredState());
+        hostComponentDesiredStateEntity.setHostName(hcDesiredStateEntityToBeDeleted.getHostName());
+        hostComponentDesiredStateEntity.setHostEntity(hcDesiredStateEntityToBeDeleted.getHostEntity());
+        hostComponentDesiredStateEntity.setAdminState(hcDesiredStateEntityToBeDeleted.getAdminState());
+        hostComponentDesiredStateEntity.setMaintenanceState(hcDesiredStateEntityToBeDeleted.getMaintenanceState());
+        hostComponentDesiredStateEntity.setRestartRequired(hcDesiredStateEntityToBeDeleted.isRestartRequired());
+        hostComponentDesiredStateEntity.setServiceName(serviceName);
+        hostComponentDesiredStateEntity.setServiceComponentDesiredStateEntity(serviceComponentDesiredStateEntity);
+        hostComponentDesiredStateDAO.merge(hostComponentDesiredStateEntity);
+        hostComponentDesiredStateDAO.remove(hcDesiredStateEntityToBeDeleted);
+      }
+
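+      // Do the same for the host component live-state rows.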
+      while (hostComponentStateIterator.hasNext()) {
+        HostComponentStateEntity hcStateToBeDeleted = hostComponentStateIterator.next();
+        HostComponentStateEntity hostComponentStateEntity = new HostComponentStateEntity();
+        hostComponentStateEntity.setClusterId(clusterEntity.getClusterId());
+        hostComponentStateEntity.setComponentName(hcStateToBeDeleted.getComponentName());
+        hostComponentStateEntity.setCurrentStackVersion(hcStateToBeDeleted.getCurrentStackVersion());
+        hostComponentStateEntity.setCurrentState(hcStateToBeDeleted.getCurrentState());
+        hostComponentStateEntity.setHostName(hcStateToBeDeleted.getHostName());
+        hostComponentStateEntity.setHostEntity(hcStateToBeDeleted.getHostEntity());
+        hostComponentStateEntity.setServiceName(serviceName);
+        hostComponentStateEntity.setServiceComponentDesiredStateEntity(serviceComponentDesiredStateEntity);
+        hostComponentStateDAO.merge(hostComponentStateEntity);
+        hostComponentStateDAO.remove(hcStateToBeDeleted);
+      }
+      serviceComponentDesiredStateEntity.setClusterServiceEntity(clusterServiceEntity);
+      serviceComponentDesiredStateDAO.merge(serviceComponentDesiredStateEntity);
+      serviceComponentDesiredStateDAO.remove(serviceComponentDesiredStateEntityToDelete);
+      serviceDesiredStateDAO.remove(serviceDesiredStateEntity);
+      clusterServiceDAO.remove(clusterServiceEntityToBeDeleted);
+    }
+  }
+
   private void moveConfigGroupsGlobalToEnv() throws AmbariException {
     final ConfigGroupConfigMappingDAO confGroupConfMappingDAO = injector.getInstance(ConfigGroupConfigMappingDAO.class);
     ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
@@ -905,7 +975,7 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
     if (clusterMap != null && !clusterMap.isEmpty()) {
       for (final Cluster cluster : clusterMap.values()) {
         Set<String> configTypes = configHelper.findConfigTypesByPropertyName(cluster.getCurrentStackVersion(),
-                CONTENT_FIELD_NAME, cluster.getClusterName());
+            CONTENT_FIELD_NAME, cluster.getClusterName());
 
         for(String configType:configTypes) {
           if(!configType.endsWith(ENV_CONFIGS_POSTFIX)) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/custom_actions/validate_configs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/validate_configs.py b/ambari-server/src/main/resources/custom_actions/validate_configs.py
index 295e74a..c245dbb 100644
--- a/ambari-server/src/main/resources/custom_actions/validate_configs.py
+++ b/ambari-server/src/main/resources/custom_actions/validate_configs.py
@@ -131,7 +131,6 @@ PROPERTIES_TO_CHECK = {
   "HIVE_CLIENT": {
     "hive-env": ["hive_log_dir", "hive_pid_dir"]
   },
-  #HCATALOG
   "HCAT": {
     "hive-env": ["hcat_log_dir", "hcat_pid_dir"]
   },
@@ -297,7 +296,6 @@ USERS_TO_GROUP_MAPPING = {
       "hive_user": "hive_user"
     }
   },
-  #HCATALOG
   "HCAT": {
     "hive-env": {
       "hive_user": "hive_user"

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/1.3.2/role_command_order.json
index a05324f..67eeff3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/role_command_order.json
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/role_command_order.json
@@ -20,10 +20,8 @@
         "WEBHCAT_SERVER-START"],
     "MAPREDUCE_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
     "OOZIE_SERVICE_CHECK-SERVICE_CHECK": ["OOZIE_SERVER-START"],
-    "WEBHCAT_SERVICE_CHECK-SERVICE_CHECK": ["WEBHCAT_SERVER-START"],
     "HBASE_SERVICE_CHECK-SERVICE_CHECK": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START"],
-    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START"],
-    "HCAT_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START"],
+    "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START", "WEBHCAT_SERVER-START"],
     "PIG_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
     "SQOOP_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
     "ZOOKEEPER_SERVICE_CHECK-SERVICE_CHECK": ["ZOOKEEPER_SERVER-START"],

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/webhcat-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/webhcat-env.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/webhcat-env.xml
new file mode 100644
index 0000000..f0679e5
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/webhcat-env.xml
@@ -0,0 +1,54 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration>
+  <!-- webhcat-env.sh -->
+  <property>
+    <name>content</name>
+    <description>webhcat-env.sh content</description>
+    <value>
+# The file containing the running pid
+PID_FILE={{webhcat_pid_file}}
+
+TEMPLETON_LOG_DIR={{templeton_log_dir}}/
+
+
+WEBHCAT_LOG_DIR={{templeton_log_dir}}/
+
+# The console error log
+ERROR_LOG={{templeton_log_dir}}/webhcat-console-error.log
+
+# The console log
+CONSOLE_LOG={{templeton_log_dir}}/webhcat-console.log
+
+#TEMPLETON_JAR=templeton_jar_name
+
+#HADOOP_PREFIX=hadoop_prefix
+
+#HCAT_PREFIX=hive_prefix
+
+# Set HADOOP_HOME to point to a specific hadoop install directory
+export HADOOP_HOME=/usr/lib/hadoop
+    </value>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/webhcat-site.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/webhcat-site.xml
new file mode 100644
index 0000000..b87ca7c
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/configuration/webhcat-site.xml
@@ -0,0 +1,156 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- 
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+<!-- The default settings for Templeton. -->
+<!-- Edit templeton-site.xml to change settings for your local -->
+<!-- install. -->
+
+<configuration supports_final="true">
+
+  <property>
+    <name>templeton.port</name>
+    <value>50111</value>
+    <description>The HTTP port for the main server.</description>
+  </property>
+
+  <property>
+    <name>templeton.hadoop.conf.dir</name>
+    <value>/etc/hadoop/conf</value>
+    <description>The path to the Hadoop configuration.</description>
+  </property>
+
+  <property>
+    <name>templeton.jar</name>
+    <value>/usr/lib/hcatalog/share/webhcat/svr/webhcat.jar</value>
+    <description>The path to the Templeton jar file.</description>
+  </property>
+
+  <property>
+    <name>templeton.libjars</name>
+    <value>/usr/lib/zookeeper/zookeeper.jar</value>
+    <description>Jars to add to the classpath.</description>
+  </property>
+
+  <property>
+    <name>templeton.hadoop</name>
+    <value>/usr/bin/hadoop</value>
+    <description>The path to the Hadoop executable.</description>
+  </property>
+
+  <property>
+    <name>templeton.pig.archive</name>
+    <value>hdfs:///apps/webhcat/pig.tar.gz</value>
+    <description>The path to the Pig archive.</description>
+  </property>
+
+  <property>
+    <name>templeton.pig.path</name>
+    <value>pig.tar.gz/pig/bin/pig</value>
+    <description>The path to the Pig executable.</description>
+  </property>
+
+  <property>
+    <name>templeton.hcat</name>
+    <value>/usr/bin/hcat</value>
+    <description>The path to the hcatalog executable.</description>
+  </property>
+
+  <property>
+    <name>templeton.hive.archive</name>
+    <value>hdfs:///apps/webhcat/hive.tar.gz</value>
+    <description>The path to the Hive archive.</description>
+  </property>
+
+  <property>
+    <name>templeton.hive.path</name>
+    <value>hive.tar.gz/hive/bin/hive</value>
+    <description>The path to the Hive executable.</description>
+  </property>
+
+  <property>
+    <name>templeton.hive.properties</name>
+    <value>hive.metastore.local=false, hive.metastore.uris=thrift://localhost:9933, hive.metastore.sasl.enabled=false</value>
+    <description>Properties to set when running hive.</description>
+  </property>
+
+  <property>
+    <name>templeton.zookeeper.hosts</name>
+    <value>localhost:2181</value>
+    <description>ZooKeeper servers, as comma separated host:port pairs</description>
+  </property>
+
+  <property>
+    <name>templeton.storage.class</name>
+    <value>org.apache.hcatalog.templeton.tool.ZooKeeperStorage</value>
+    <description>The class to use as storage</description>
+  </property>
+
+  <property>
+    <name>templeton.override.enabled</name>
+    <value>false</value>
+    <description>
+      Enable the override path in templeton.override.jars
+    </description>
+  </property>
+
+  <property>
+    <name>templeton.streaming.jar</name>
+    <value>hdfs:///apps/webhcat/hadoop-streaming.jar</value>
+    <description>The hdfs path to the Hadoop streaming jar file.</description>
+  </property>
+
+  <property>
+    <name>templeton.exec.timeout</name>
+    <value>60000</value>
+    <description>Timeout, in milliseconds, for Templeton API calls.</description>
+  </property>
+
+  <!-- webhcat-env.sh -->
+  <property>
+    <name>content</name>
+    <description>webhcat-env.sh content</description>
+    <value>
+# The file containing the running pid
+PID_FILE={{pid_file}}
+
+TEMPLETON_LOG_DIR={{templeton_log_dir}}/
+
+
+WEBHCAT_LOG_DIR={{templeton_log_dir}}/
+
+# The console error log
+ERROR_LOG={{templeton_log_dir}}/webhcat-console-error.log
+
+# The console log
+CONSOLE_LOG={{templeton_log_dir}}/webhcat-console.log
+
+#TEMPLETON_JAR=templeton_jar_name
+
+#HADOOP_PREFIX=hadoop_prefix
+
+#HCAT_PREFIX=hive_prefix
+
+# Set HADOOP_HOME to point to a specific hadoop install directory
+export HADOOP_HOME=/usr/lib/hadoop
+    </value>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml
index 2c58c44..c78de04 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml
@@ -81,6 +81,49 @@
         </component>
 
         <component>
+          <name>WEBHCAT_SERVER</name>
+          <displayName>WebHCat Server</displayName>
+          <category>MASTER</category>
+          <cardinality>1</cardinality>
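+          <!-- WebHCat Server now lives under HIVE; required clients and ZooKeeper are auto-deployed alongside it. -->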
+          <dependencies>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>MAPREDUCE/MAPREDUCE_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
+              <scope>cluster</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+                <co-locate>HIVE/WEBHCAT_SERVER</co-locate>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>ZOOKEEPER/ZOOKEEPER_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <commandScript>
+            <script>scripts/webhcat_server.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+        </component>
+
+        <component>
           <name>HIVE_CLIENT</name>
           <displayName>Hive Client</displayName>
           <category>CLIENT</category>
@@ -112,68 +155,6 @@
             </configFile>            
           </configFiles>
         </component>
-      </components>
-
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hive</name>
-            </package>
-            <package>
-              <name>mysql-connector-java</name>
-            </package>
-            <package>
-              <name>mysql</name>
-            </package>
-          </packages>
-        </osSpecific>
-        <osSpecific>
-          <osFamily>redhat5,redhat6</osFamily>
-          <packages>
-            <package>
-              <name>mysql-server</name>
-            </package>
-          </packages>
-        </osSpecific>
-        <osSpecific>
-          <osFamily>suse11</osFamily>
-          <packages>
-            <package>
-              <name>mysql-client</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-      
-      <requiredServices>
-        <service>MAPREDUCE</service>
-        <service>ZOOKEEPER</service>
-      </requiredServices>
-
-      <configuration-dependencies>
-        <config-type>hive-site</config-type>
-        <config-type>hive-env</config-type>
-        <config-type>hive-log4j</config-type>
-        <config-type>hive-exec-log4j</config-type>
-      </configuration-dependencies>
-    </service>
-
-    <service>
-      <name>HCATALOG</name>
-      <displayName>HCatalog</displayName>
-      <comment>A table and storage management layer for Hadoop that enables users with different data processing tools
-        to more easily read and write data on the grid.
-      </comment>
-      <version>0.11.0.1.3.3.0</version>
-      <components>
         <component>
           <name>HCAT</name>
           <displayName>HCat</displayName>
@@ -206,16 +187,49 @@
           </configFiles>
         </component>
       </components>
+
       <osSpecifics>
         <osSpecific>
           <osFamily>any</osFamily>
           <packages>
             <package>
+              <name>hive</name>
+            </package>
+            <package>
               <name>hcatalog</name>
             </package>
+            <package>
+              <name>webhcat-tar-hive</name>
+            </package>
+            <package>
+              <name>webhcat-tar-pig</name>
+            </package>
+            <package>
+              <name>mysql-connector-java</name>
+            </package>
+            <package>
+              <name>mysql</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>redhat5,redhat6</osFamily>
+          <packages>
+            <package>
+              <name>mysql-server</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>suse11</osFamily>
+          <packages>
+            <package>
+              <name>mysql-client</name>
+            </package>
           </packages>
         </osSpecific>
       </osSpecifics>
+
       <commandScript>
         <script>scripts/service_check.py</script>
         <scriptType>PYTHON</scriptType>
@@ -223,22 +237,18 @@
       </commandScript>
       
       <requiredServices>
-        <service>HIVE</service>
+        <service>MAPREDUCE</service>
+        <service>ZOOKEEPER</service>
       </requiredServices>
 
       <configuration-dependencies>
         <config-type>hive-site</config-type>
         <config-type>hive-env</config-type>
-      </configuration-dependencies>
-
-      <excluded-config-types>
-        <config-type>hive-env</config-type>
-        <config-type>hive-site</config-type>
-        <config-type>hive-exec-log4j</config-type>
         <config-type>hive-log4j</config-type>
-      </excluded-config-types>
-
+        <config-type>hive-exec-log4j</config-type>
+        <config-type>webhcat-site</config-type>
+        <config-type>webhcat-env</config-type>
+      </configuration-dependencies>
     </service>
-
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/templetonSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/templetonSmoke.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/templetonSmoke.sh
new file mode 100644
index 0000000..21204e6
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/files/templetonSmoke.sh
@@ -0,0 +1,96 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+export ttonhost=$1
+export smoke_test_user=$2
+export smoke_user_keytab=$3
+export security_enabled=$4
+export kinit_path_local=$5
+export ttonurl="http://${ttonhost}:50111/templeton/v1"
+
+if [[ $security_enabled == "true" ]]; then
+  kinitcmd="${kinit_path_local}  -kt ${smoke_user_keytab} ${smoke_test_user}; "
+else
+  kinitcmd=""
+fi
+
+export no_proxy=$ttonhost
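+# Query the Templeton status endpoint; curl's -w appends the HTTP code for parsing below.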
+cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>'    $ttonurl/status 2>&1"
+retVal=`su - ${smoke_test_user} -c "$cmd"`
+httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
+
+if [[ "$httpExitCode" -ne "200" ]] ; then
+  echo "Templeton Smoke Test (status cmd): Failed. : $retVal"
+  export TEMPLETON_EXIT_CODE=1
+  exit 1
+fi
+
+# NOTE: the checks below this early exit are currently dead code; remove the exit to enable the ddl and pig tests.
+exit 0
+
+#try hcat ddl command
+echo "user.name=${smoke_test_user}&exec=show databases;" /tmp/show_db.post.txt
+cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>' -d  \@${destdir}/show_db.post.txt  $ttonurl/ddl 2>&1"
+retVal=`su - ${smoke_test_user} -c "$cmd"`
+httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
+
+if [[ "$httpExitCode" -ne "200" ]] ; then
+  echo "Templeton Smoke Test (ddl cmd): Failed. : $retVal"
+  export TEMPLETON_EXIT_CODE=1
+  exit 1
+fi
+
+if [[ $security_enabled == "true" ]]; then
+  echo "Templeton Pig Smoke Tests not run in secure mode"
+  exit 0
+fi
+
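+# Build a tiny Pig script, stage it and an input file in HDFS, then submit it through the /pig endpoint.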
+#try pig query
+outname=${smoke_test_user}.`date +"%M%d%y"`.$$;
+ttonTestOutput="/tmp/idtest.${outname}.out";
+ttonTestInput="/tmp/idtest.${outname}.in";
+ttonTestScript="idtest.${outname}.pig"
+
+echo "A = load '$ttonTestInput' using PigStorage(':');"  > /tmp/$ttonTestScript
+echo "B = foreach A generate \$0 as id; " >> /tmp/$ttonTestScript
+echo "store B into '$ttonTestOutput';" >> /tmp/$ttonTestScript
+
+#copy pig script to hdfs
+su - ${smoke_test_user} -c "hadoop dfs -copyFromLocal /tmp/$ttonTestScript /tmp/$ttonTestScript"
+
+#copy input file to hdfs
+su - ${smoke_test_user} -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput"
+
+#create, copy post args file
+echo -n "user.name=${smoke_test_user}&file=/tmp/$ttonTestScript" > /tmp/pig_post.txt
+
+#submit pig query
+cmd="curl -s -w 'http_code <%{http_code}>' -d  \@${destdir}/pig_post.txt  $ttonurl/pig 2>&1"
+retVal=`su - ${smoke_test_user} -c "$cmd"`
+httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
+if [[ "$httpExitCode" -ne "200" ]] ; then
+  echo "Templeton Smoke Test (pig cmd): Failed. : $retVal"
+  export TEMPLETON_EXIT_CODE=1
+  exit 1
+fi
+
+exit 0

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
index e38f059..2285cac 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
@@ -64,7 +64,6 @@ smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
 
 #hive_env
@@ -155,11 +154,37 @@ hive_hdfs_user_dir = format("/user/{hive_user}")
 hive_hdfs_user_mode = 0700
 #for create_hdfs_directory
 hostname = config["hostname"]
-hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+
+#################################################
+################## WebHCat ######################
+#################################################
+webhcat_env_sh_template = config['configurations']['webhcat-env']['content']
+
+config_dir = '/etc/hcatalog/conf'
+
+templeton_log_dir = config['configurations']['hive-env']['hcat_log_dir']
+templeton_pid_dir = status_params.templeton_pid_dir
+
+webhcat_pid_file = status_params.webhcat_pid_file
+
+templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
+
+webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
+
+webhcat_apps_dir = "/apps/webhcat"
+
+#hdfs directories
+hcat_hdfs_user_dir = format("/user/{hcat_user}")
+hcat_hdfs_user_mode = 0755
+webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
+webhcat_hdfs_user_mode = 0755
+#used by the webhcat smoke test (templetonSmoke.sh)
+security_param = "true" if security_enabled else "false"
+
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/service_check.py
index 09ba1bf..d7b10eb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/service_check.py
@@ -23,6 +23,7 @@ import socket
 import sys
 
 from hcat_service_check import hcat_service_check
+from webhcat_service_check import webhcat_service_check
 
 class HiveServiceCheck(Script):
   def service_check(self, env):
@@ -42,6 +43,7 @@ class HiveServiceCheck(Script):
       sys.exit(1)
 
     hcat_service_check()
+    webhcat_service_check()
 
 if __name__ == "__main__":
   HiveServiceCheck().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/status_params.py
index f371bee..7c1af00 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/status_params.py
@@ -29,6 +29,9 @@ hive_metastore_pid = 'hive.pid'
 
 hcat_pid_dir = config['configurations']['hive-env']['hcat_pid_dir'] #hcat_pid_dir
 
+templeton_pid_dir = config['configurations']['hive-env']['hcat_pid_dir']
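+# WebHCat (Templeton) reuses the HCat pid directory; the server writes webhcat.pid there.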
+webhcat_pid_file = format('{templeton_pid_dir}/webhcat.pid')
+
 if System.get_instance().os_family == "suse":
   daemon_name = 'mysql'
 else:

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat.py
new file mode 100644
index 0000000..037cdb5
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat.py
@@ -0,0 +1,107 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+from resource_management import *
+
+
+def webhcat():
+  import params
+
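+  # Create the HDFS user and apps directories WebHCat needs; create_delayed batches them into one call below.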
+  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
+    params.HdfsDirectory(params.hcat_hdfs_user_dir,
+                         action="create_delayed",
+                         owner=params.hcat_user,
+                         mode=params.hcat_hdfs_user_mode
+    )
+  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
+                       action="create_delayed",
+                       owner=params.webhcat_user,
+                       mode=params.webhcat_hdfs_user_mode
+  )
+  params.HdfsDirectory(params.webhcat_apps_dir,
+                       action="create_delayed",
+                       owner=params.webhcat_user,
+                       mode=0755
+  )
+  params.HdfsDirectory(None, action="create")
+  Directory(params.templeton_pid_dir,
+            owner=params.webhcat_user,
+            mode=0755,
+            group=params.user_group,
+            recursive=True)
+
+  Directory(params.templeton_log_dir,
+            owner=params.webhcat_user,
+            mode=0755,
+            group=params.user_group,
+            recursive=True)
+
+  Directory(params.config_dir,
+            owner=params.webhcat_user,
+            group=params.user_group)
+
+  XmlConfig("webhcat-site.xml",
+            conf_dir=params.config_dir,
+            configurations=params.config['configurations']['webhcat-site'],
+            configuration_attributes=params.config['configuration_attributes']['webhcat-site'],
+            owner=params.webhcat_user,
+            group=params.user_group,
+  )
+
+  File(format("{config_dir}/webhcat-env.sh"),
+       owner=params.webhcat_user,
+       group=params.user_group,
+       content=InlineTemplate(params.webhcat_env_sh_template)
+  )
+
+  if params.security_enabled:
+    kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_user};")
+  else:
+    kinit_if_needed = ""
+
+  if kinit_if_needed:
+    Execute(kinit_if_needed,
+            user=params.webhcat_user,
+            path='/bin'
+    )
+
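+  # Stage the streaming/pig/hive archives into the WebHCat apps dir in HDFS
+  # (the "kinnit_if_needed" spelling matches the CopyFromLocal resource's parameter name).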
+  CopyFromLocal('/usr/lib/hadoop/contrib/streaming/hadoop-streaming*.jar',
+                owner=params.webhcat_user,
+                mode=0755,
+                dest_dir=params.webhcat_apps_dir,
+                kinnit_if_needed=kinit_if_needed,
+                hdfs_user=params.hdfs_user
+  )
+
+  CopyFromLocal('/usr/share/HDP-webhcat/pig.tar.gz',
+                owner=params.webhcat_user,
+                mode=0755,
+                dest_dir=params.webhcat_apps_dir,
+                kinnit_if_needed=kinit_if_needed,
+                hdfs_user=params.hdfs_user
+  )
+
+  CopyFromLocal('/usr/share/HDP-webhcat/hive.tar.gz',
+                owner=params.webhcat_user,
+                mode=0755,
+                dest_dir=params.webhcat_apps_dir,
+                kinnit_if_needed=kinit_if_needed,
+                hdfs_user=params.hdfs_user
+  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_server.py
new file mode 100644
index 0000000..088cb41
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_server.py
@@ -0,0 +1,53 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+import sys
+from resource_management import *
+
+from webhcat import webhcat
+from webhcat_service import webhcat_service
+
+class WebHCatServer(Script):
+  def install(self, env):
+    self.install_packages(env)
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+    webhcat()
+
+  def start(self, env):
+    import params
+    env.set_params(params)
+    self.configure(env) # re-apply configuration (required when security settings change)
+    webhcat_service(action = 'start')
+
+  def stop(self, env):
+    import params
+    env.set_params(params)
+
+    webhcat_service(action = 'stop')
+
+  def status(self, env):
+    import status_params
+    env.set_params(status_params)
+    check_process_status(status_params.webhcat_pid_file)
+
+if __name__ == "__main__":
+  WebHCatServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service.py
new file mode 100644
index 0000000..99c49a6
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service.py
@@ -0,0 +1,40 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+from resource_management import *
+
+def webhcat_service(action='start'):
+  import params
+
+  cmd = format('env HADOOP_HOME={hadoop_home} /usr/lib/hcatalog/sbin/webhcat_server.sh')
+
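+  # Starting is guarded by a pid-file liveness check so repeated starts are no-ops.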
+  if action == 'start':
+    daemon_cmd = format('{cmd} start')
+    no_op_test = format('ls {webhcat_pid_file} >/dev/null 2>&1 && ps `cat {webhcat_pid_file}` >/dev/null 2>&1')
+    Execute(daemon_cmd,
+            user=params.webhcat_user,
+            not_if=no_op_test
+    )
+  elif action == 'stop':
+    daemon_cmd = format('{cmd} stop')
+    Execute(daemon_cmd,
+            user=params.webhcat_user
+    )
+    Execute(format('rm -f {webhcat_pid_file}'))

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service_check.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service_check.py
new file mode 100644
index 0000000..1352e0b
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/webhcat_service_check.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+def webhcat_service_check():
+  import params
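+  # Stage templetonSmoke.sh from the service package, then run it against the first WebHCat host.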
+  File(format("{tmp_dir}/templetonSmoke.sh"),
+       content= StaticFile('templetonSmoke.sh'),
+       mode=0755
+  )
+
+  cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {smokeuser_keytab}"
+               " {security_param} {kinit_path_local}",
+               smokeuser_keytab=params.smoke_user_keytab if params.security_enabled else "no_keytab")
+
+  Execute(cmd,
+          tries=3,
+          try_sleep=5,
+          path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
+          logoutput=True)

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/metainfo.xml
index c926429..137ab71 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/metainfo.xml
@@ -52,7 +52,7 @@
               </auto-deploy>
             </dependency>
             <dependency>
-              <name>HCATALOG/HCAT</name>
+              <name>HIVE/HCAT</name>
               <scope>host</scope>
               <auto-deploy>
                 <enabled>true</enabled>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2
index c8f2d87..3833b15 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2
@@ -69,12 +69,6 @@ define servicegroup {
   alias  OOZIE Checks
 }
 {% endif %}
-{% if hostgroup_defs['webhcat-server'] %}
-define servicegroup {
-  servicegroup_name  WEBHCAT
-  alias  WEBHCAT Checks
-}
-{% endif %}
 {% if hostgroup_defs['nagios-server'] %}
 define servicegroup {
   servicegroup_name  NAGIOS

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-services.cfg.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-services.cfg.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-services.cfg.j2
index 1a68bfd..969c0f6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-services.cfg.j2
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/templates/hadoop-services.cfg.j2
@@ -566,7 +566,7 @@ define service {
         hostgroup_name          webhcat-server
         use                     hadoop-service
         service_description     WEBHCAT::WebHCat Server status
-        servicegroups           WEBHCAT 
+        servicegroups           HIVE
         {% if security_enabled %}
         check_command           check_templeton_status!{{ templeton_port }}!v1!{{ str(security_enabled).lower() }}!{{ nagios_keytab_path }}!{{ nagios_principal_name }}!{{ kinit_path_local }}
         {% else %}

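The hostgroup_defs gating used by these Nagios templates can be exercised in isolation, which makes the effect of the removal clearer: once the WEBHCAT servicegroup block is gone, any service definition still naming WEBHCAT would reference a nonexistent group, hence the switch to HIVE above. A minimal sketch with the jinja2 library; the host value is made up for illustration.

from jinja2 import Template

# Trimmed-down copy of a servicegroup block from the template above; only
# blocks whose hostgroup is actually defined survive rendering.
tmpl = Template("""\
{% if hostgroup_defs['nagios-server'] %}
define servicegroup {
  servicegroup_name  NAGIOS
  alias  NAGIOS Checks
}
{% endif %}
""")

print(tmpl.render(hostgroup_defs={'nagios-server': ['c6401.ambari.apache.org']}))
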
http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/configuration/webhcat-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/configuration/webhcat-env.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/configuration/webhcat-env.xml
deleted file mode 100644
index 304bbb7..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/configuration/webhcat-env.xml
+++ /dev/null
@@ -1,54 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration>
-  <!-- webhcat-env.sh -->
-  <property>
-    <name>content</name>
-    <description>webhcat-env.sh content</description>
-    <value>
-# The file containing the running pid
-PID_FILE={{pid_file}}
-
-TEMPLETON_LOG_DIR={{templeton_log_dir}}/
-
-
-WEBHCAT_LOG_DIR={{templeton_log_dir}}/
-
-# The console error log
-ERROR_LOG={{templeton_log_dir}}/webhcat-console-error.log
-
-# The console log
-CONSOLE_LOG={{templeton_log_dir}}/webhcat-console.log
-
-#TEMPLETON_JAR=templeton_jar_name
-
-#HADOOP_PREFIX=hadoop_prefix
-
-#HCAT_PREFIX=hive_prefix
-
-# Set HADOOP_HOME to point to a specific hadoop install directory
-export HADOOP_HOME=/usr/lib/hadoop
-    </value>
-  </property>
-  
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/configuration/webhcat-site.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/configuration/webhcat-site.xml
deleted file mode 100644
index b87ca7c..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/configuration/webhcat-site.xml
+++ /dev/null
@@ -1,156 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- 
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-<!-- The default settings for Templeton. -->
-<!-- Edit templeton-site.xml to change settings for your local -->
-<!-- install. -->
-
-<configuration supports_final="true">
-
-  <property>
-    <name>templeton.port</name>
-      <value>50111</value>
-    <description>The HTTP port for the main server.</description>
-  </property>
-
-  <property>
-    <name>templeton.hadoop.conf.dir</name>
-    <value>/etc/hadoop/conf</value>
-    <description>The path to the Hadoop configuration.</description>
-  </property>
-
-  <property>
-    <name>templeton.jar</name>
-    <value>/usr/lib/hcatalog/share/webhcat/svr/webhcat.jar</value>
-    <description>The path to the Templeton jar file.</description>
-  </property>
-
-  <property>
-    <name>templeton.libjars</name>
-    <value>/usr/lib/zookeeper/zookeeper.jar</value>
-    <description>Jars to add the the classpath.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.hadoop</name>
-    <value>/usr/bin/hadoop</value>
-    <description>The path to the Hadoop executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.archive</name>
-    <value>hdfs:///apps/webhcat/pig.tar.gz</value>
-    <description>The path to the Pig archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.path</name>
-    <value>pig.tar.gz/pig/bin/pig</value>
-    <description>The path to the Pig executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hcat</name>
-    <value>/usr/bin/hcat</value>
-    <description>The path to the hcatalog executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.archive</name>
-    <value>hdfs:///apps/webhcat/hive.tar.gz</value>
-    <description>The path to the Hive archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.path</name>
-    <value>hive.tar.gz/hive/bin/hive</value>
-    <description>The path to the Hive executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.properties</name>
-    <value>hive.metastore.local=false, hive.metastore.uris=thrift://localhost:9933, hive.metastore.sasl.enabled=false</value>
-    <description>Properties to set when running hive.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.zookeeper.hosts</name>
-    <value>localhost:2181</value>
-    <description>ZooKeeper servers, as comma separated host:port pairs</description>
-  </property>
-
-  <property>
-    <name>templeton.storage.class</name>
-    <value>org.apache.hcatalog.templeton.tool.ZooKeeperStorage</value>
-    <description>The class to use as storage</description>
-  </property>
-
-  <property>
-   <name>templeton.override.enabled</name>
-   <value>false</value>
-   <description>
-     Enable the override path in templeton.override.jars
-   </description>
- </property>
-
- <property>
-    <name>templeton.streaming.jar</name>
-    <value>hdfs:///apps/webhcat/hadoop-streaming.jar</value>
-    <description>The hdfs path to the Hadoop streaming jar file.</description>
-  </property> 
-
-  <property>
-    <name>templeton.exec.timeout</name>
-    <value>60000</value>
-    <description>Time out for templeton api</description>
-  </property>
-
-  <!-- webhcat-env.sh -->
-  <property>
-    <name>content</name>
-    <description>webhcat-env.sh content</description>
-    <value>
-# The file containing the running pid
-PID_FILE={{pid_file}}
-
-TEMPLETON_LOG_DIR={{templeton_log_dir}}/
-
-
-WEBHCAT_LOG_DIR={{templeton_log_dir}}/
-
-# The console error log
-ERROR_LOG={{templeton_log_dir}}/webhcat-console-error.log
-
-# The console log
-CONSOLE_LOG={{templeton_log_dir}}/webhcat-console.log
-
-#TEMPLETON_JAR=templeton_jar_name
-
-#HADOOP_PREFIX=hadoop_prefix
-
-#HCAT_PREFIX=hive_prefix
-
-# Set HADOOP_HOME to point to a specific hadoop install directory
-export HADOOP_HOME=/usr/lib/hadoop
-    </value>
-  </property>
-  
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/metainfo.xml
deleted file mode 100644
index b115c28..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/metainfo.xml
+++ /dev/null
@@ -1,103 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <displayName>WebHCat</displayName>
-      <comment>Provides a REST-like web API for HCatalog and related Hadoop components.</comment>
-      <version>0.11.0.1.3.3.0</version>
-      <components>
-        <component>
-          <name>WEBHCAT_SERVER</name>
-          <displayName>WebHCat Server</displayName>
-          <category>MASTER</category>
-          <cardinality>1</cardinality>
-          <dependencies>
-            <dependency>
-              <name>HDFS/HDFS_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>MAPREDUCE/MAPREDUCE_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
-              <scope>cluster</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-                <co-locate>WEBHCAT/WEBHCAT_SERVER</co-locate>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>ZOOKEEPER/ZOOKEEPER_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/webhcat_server.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-        </component>
-      </components>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hcatalog</name>
-            </package>
-            <package>
-              <name>webhcat-tar-hive</name>
-            </package>
-            <package>
-              <name>webhcat-tar-pig</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-      
-      <requiredServices>
-        <service>ZOOKEEPER</service>
-        <service>HIVE</service>
-      </requiredServices>
-      
-      <configuration-dependencies>
-        <config-type>webhcat-site</config-type>
-        <config-type>webhcat-env</config-type>
-      </configuration-dependencies>
-    </service>
-  </services>
-</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/files/templetonSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/files/templetonSmoke.sh b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/files/templetonSmoke.sh
deleted file mode 100644
index 21204e6..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/files/templetonSmoke.sh
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/bin/sh
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-export ttonhost=$1
-export smoke_test_user=$2
-export smoke_user_keytab=$3
-export security_enabled=$4
-export kinit_path_local=$5
-export ttonurl="http://${ttonhost}:50111/templeton/v1"
-
-if [[ $security_enabled == "true" ]]; then
-  kinitcmd="${kinit_path_local}  -kt ${smoke_user_keytab} ${smoke_test_user}; "
-else
-  kinitcmd=""
-fi
-
-export no_proxy=$ttonhost
-cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>'    $ttonurl/status 2>&1"
-retVal=`su - ${smoke_test_user} -c "$cmd"`
-httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
-
-if [[ "$httpExitCode" -ne "200" ]] ; then
-  echo "Templeton Smoke Test (status cmd): Failed. : $retVal"
-  export TEMPLETON_EXIT_CODE=1
-  exit 1
-fi
-
-exit 0
-
-#try hcat ddl command
-echo "user.name=${smoke_test_user}&exec=show databases;" /tmp/show_db.post.txt
-cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>' -d  \@${destdir}/show_db.post.txt  $ttonurl/ddl 2>&1"
-retVal=`su - ${smoke_test_user} -c "$cmd"`
-httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
-
-if [[ "$httpExitCode" -ne "200" ]] ; then
-  echo "Templeton Smoke Test (ddl cmd): Failed. : $retVal"
-  export TEMPLETON_EXIT_CODE=1
-  exit  1
-fi
-
-# NOT SURE?? SUHAS
-if [[ $security_enabled == "true" ]]; then
-  echo "Templeton Pig Smoke Tests not run in secure mode"
-  exit 0
-fi
-
-#try pig query
-outname=${smoke_test_user}.`date +"%M%d%y"`.$$;
-ttonTestOutput="/tmp/idtest.${outname}.out";
-ttonTestInput="/tmp/idtest.${outname}.in";
-ttonTestScript="idtest.${outname}.pig"
-
-echo "A = load '$ttonTestInput' using PigStorage(':');"  > /tmp/$ttonTestScript
-echo "B = foreach A generate \$0 as id; " >> /tmp/$ttonTestScript
-echo "store B into '$ttonTestOutput';" >> /tmp/$ttonTestScript
-
-#copy pig script to hdfs
-su - ${smoke_test_user} -c "hadoop dfs -copyFromLocal /tmp/$ttonTestScript /tmp/$ttonTestScript"
-
-#copy input file to hdfs
-su - ${smoke_test_user} -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput"
-
-#create, copy post args file
-echo -n "user.name=${smoke_test_user}&file=/tmp/$ttonTestScript" > /tmp/pig_post.txt
-
-#submit pig query
-cmd="curl -s -w 'http_code <%{http_code}>' -d  \@${destdir}/pig_post.txt  $ttonurl/pig 2>&1"
-retVal=`su - ${smoke_test_user} -c "$cmd"`
-httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
-if [[ "$httpExitCode" -ne "200" ]] ; then
-  echo "Templeton Smoke Test (pig cmd): Failed. : $retVal"
-  export TEMPLETON_EXIT_CODE=1
-  exit 1
-fi
-
-exit 0

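The first step of the deleted smoke test is an unauthenticated GET against the WebHCat status endpoint, with anything other than HTTP 200 treated as failure. That probe can be sketched in a few lines of Python 3; the host argument and port 50111 match the script's defaults, and the kinit/--negotiate handling for secure clusters is deliberately omitted.

import sys
import urllib.request

def templeton_status(host, port=50111):
    # Mirrors the smoke test's status step: GET /templeton/v1/status and
    # treat anything but HTTP 200 (including connection errors) as failure.
    url = "http://{0}:{1}/templeton/v1/status".format(host, port)
    try:
        return urllib.request.urlopen(url, timeout=30).getcode() == 200
    except Exception as exc:
        print("Templeton status check failed: {0}".format(exc))
        return False

if __name__ == "__main__":
    sys.exit(0 if templeton_status(sys.argv[1]) else 1)
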
http://git-wip-us.apache.org/repos/asf/ambari/blob/601014ed/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/__init__.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/__init__.py
deleted file mode 100644
index 35de4bb..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/__init__.py
+++ /dev/null
@@ -1,20 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""


[09/27] git commit: AMBARI-7307 Use fixed URL for repo for versioned RPMs. (dsen)

Posted by jo...@apache.org.
AMBARI-7307 Use fixed URL for repo for versioned RPMs. (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/007288a2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/007288a2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/007288a2

Branch: refs/heads/branch-alerts-dev
Commit: 007288a24bb3720117552d60a82beef7c6831263
Parents: 8dd0e81
Author: Dmytro Sen <ds...@hortonworks.com>
Authored: Mon Sep 15 19:26:54 2014 +0300
Committer: Dmytro Sen <ds...@hortonworks.com>
Committed: Mon Sep 15 19:26:54 2014 +0300

----------------------------------------------------------------------
 .../services/OOZIE/package/scripts/params.py    |  3 +-
 .../services/SQOOP/package/scripts/params.py    | 12 ++++--
 .../services/FALCON/package/scripts/params.py   |  9 +++--
 .../resources/stacks/HDP/2.2/repos/repoinfo.xml | 27 +++-----------
 .../stacks/HDP/2.2/services/FALCON/metainfo.xml | 12 +++++-
 .../stacks/HDP/2.2/services/FLUME/metainfo.xml  |  2 +-
 .../stacks/HDP/2.2/services/HBASE/metainfo.xml  |  2 +-
 .../services/HDFS/configuration/hadoop-env.xml  |  2 +-
 .../services/HDFS/configuration/hdfs-site.xml   |  2 +-
 .../stacks/HDP/2.2/services/HDFS/metainfo.xml   |  4 +-
 .../stacks/HDP/2.2/services/HIVE/metainfo.xml   | 39 +++++++++++++++++++-
 .../services/OOZIE/configuration/oozie-site.xml |  2 +-
 .../stacks/HDP/2.2/services/OOZIE/metainfo.xml  | 27 +++++++++++++-
 .../stacks/HDP/2.2/services/PIG/metainfo.xml    |  5 +--
 .../stacks/HDP/2.2/services/SQOOP/metainfo.xml  | 15 +++++++-
 .../stacks/HDP/2.2/services/STORM/metainfo.xml  | 12 +++++-
 .../stacks/HDP/2.2/services/TEZ/metainfo.xml    |  2 +-
 .../WEBHCAT/configuration/webhcat-site.xml      | 10 ++---
 .../HDP/2.2/services/WEBHCAT/metainfo.xml       |  2 +-
 .../YARN/configuration-mapred/mapred-site.xml   |  2 +-
 .../services/YARN/configuration/yarn-site.xml   |  2 +-
 .../stacks/HDP/2.2/services/YARN/metainfo.xml   |  6 +--
 .../HDP/2.2/services/ZOOKEEPER/metainfo.xml     |  2 +-
 23 files changed, 143 insertions(+), 58 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
index ac26ede..4bce882 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
@@ -34,15 +34,16 @@ if rpm_version is not None:
   hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
   hadoop_lib_home = format("/usr/hdp/{rpm_version}/hadoop/lib")
   mapreduce_libs_path = format("/usr/hdp/{rpm_version}/hadoop-mapreduce/*")
+  conf_dir = format("/usr/hdp/{rpm_version}/oozie/conf")
 else:
   hadoop_conf_dir = "/etc/hadoop/conf"
   hadoop_bin_dir = "/usr/bin"
   hadoop_lib_home = "/usr/lib/hadoop/lib"
   mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+  conf_dir = "/etc/oozie/conf"
 
 oozie_user = config['configurations']['oozie-env']['oozie_user']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
-conf_dir = "/etc/oozie/conf"
 user_group = config['configurations']['cluster-env']['user_group']
 jdk_location = config['hostLevelParams']['jdk_location']
 check_db_connection_jar_name = "DBConnectionVerification.jar"

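The pattern applied across these params.py files is the same everywhere: every filesystem-layout decision is funneled through a single rpm_version check, choosing the versioned /usr/hdp/<version>/ tree when versioned RPMs are installed and the classic package layout otherwise. A condensed sketch, with paths copied from the diff and the helper name purely hypothetical:

def oozie_conf_dir(rpm_version):
    if rpm_version is not None:
        # Versioned-RPM layout rooted under /usr/hdp/<version>/
        return "/usr/hdp/{0}/oozie/conf".format(rpm_version)
    # Classic package layout
    return "/etc/oozie/conf"

assert oozie_conf_dir(None) == "/etc/oozie/conf"
assert oozie_conf_dir("2.9.9.9-117") == "/usr/hdp/2.9.9.9-117/oozie/conf"
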
http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
index 9170fdc..5784f9d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
@@ -27,18 +27,22 @@ rpm_version = default("/configurations/hadoop-env/rpm_version", None)
 #hadoop params
 if rpm_version is not None:
   zoo_conf_dir = format('/usr/hdp/{rpm_version}/etc/zookeeper')
+  sqoop_conf_dir = format('/usr/hdp/{rpm_version}/sqoop/conf')
+  sqoop_lib = format('/usr/hdp/{rpm_version}/sqoop/lib')
+  hbase_home = format('/usr/hdp/{rpm_version}/hbase')
+  hive_home = format('/usr/hdp/{rpm_version}/hive')
 else:
   zoo_conf_dir = "/etc/zookeeper"
+  sqoop_conf_dir = "/usr/lib/sqoop/conf"
+  sqoop_lib = "/usr/lib/sqoop/lib"
+  hbase_home = "/usr"
+  hive_home = "/usr"
 
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
 user_group = config['configurations']['cluster-env']['user_group']
 sqoop_env_sh_template = config['configurations']['sqoop-env']['content']
 
-sqoop_conf_dir = "/usr/lib/sqoop/conf"
-hbase_home = "/usr"
-hive_home = "/usr"
-sqoop_lib = "/usr/lib/sqoop/lib"
 sqoop_user = config['configurations']['sqoop-env']['sqoop_user']
 
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
index 79bdef3..734faf2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
@@ -30,9 +30,15 @@ rpm_version = default("/configurations/hadoop-env/rpm_version", None)
 if rpm_version is not None:
   hadoop_conf_dir = format("/usr/hdp/{rpm_version}/etc/hadoop/conf")
   hadoop_bin_dir = format("/usr/hdp/{rpm_version}/hadoop/bin")
+  falcon_webapp_dir = format("/usr/hdp/{rpm_version}/falcon/webapp")
+  falcon_home = format("/usr/hdp/{rpm_version}/falcon")
+  falcon_conf_dir = format("/usr/hdp/{rpm_version}/falcon/conf")
 else:
   hadoop_conf_dir = "/etc/hadoop/conf"
   hadoop_bin_dir = "/usr/bin"
+  falcon_webapp_dir = '/var/lib/falcon/webapp'
+  falcon_home = '/usr/lib/falcon'
+  falcon_conf_dir = '/etc/falcon/conf'
 
 oozie_user = config['configurations']['oozie-env']['oozie_user']
 falcon_user = config['configurations']['falcon-env']['falcon_user']
@@ -42,8 +48,6 @@ user_group = config['configurations']['cluster-env']['user_group']
 proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']
 
 java_home = config['hostLevelParams']['java_home']
-falcon_home = '/usr/lib/falcon'
-falcon_conf_dir = '/etc/falcon/conf'
 falcon_local_dir = config['configurations']['falcon-env']['falcon_local_dir']
 falcon_log_dir = config['configurations']['falcon-env']['falcon_log_dir']
 store_uri = config['configurations']['falcon-startup.properties']['*.config.store.uri']
@@ -59,7 +63,6 @@ falcon_startup_properties = config['configurations']['falcon-startup.properties'
 smokeuser_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 falcon_env_sh_template = config['configurations']['falcon-env']['content']
 
-falcon_webapp_dir = '/var/lib/falcon/webapp'
 flacon_apps_dir = '/apps/falcon'
 #for create_hdfs_directory
 security_enabled = config['configurations']['cluster-env']['security_enabled']

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.2/repos/repoinfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/repos/repoinfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/repos/repoinfo.xml
index c99f92a..cdb90ce 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/repos/repoinfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/repos/repoinfo.xml
@@ -18,16 +18,11 @@
 <reposinfo>
   <os type="redhat6">
     <repo>
-      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.9.9.9-98</baseurl>
-      <repoid>HDP-2.9.9.9-98</repoid>
+      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.9.9.9</baseurl>
+      <repoid>HDP-2.9.9.9</repoid>
       <reponame>HDP</reponame>
     </repo>
     <repo>
-      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.2.0.0</baseurl>
-      <repoid>HDP-2.2.0.0</repoid>
-      <reponame>HDP-2.2</reponame>
-    </repo>
-    <repo>
       <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.17/repos/centos6</baseurl>
       <repoid>HDP-UTILS-1.1.0.17</repoid>
       <reponame>HDP-UTILS</reponame>
@@ -35,16 +30,11 @@
   </os>
   <os type="redhat5">
     <repo>
-      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/BUILDS/2.9.9.9-98</baseurl>
-      <repoid>HDP-2.9.9.9-98</repoid>
+      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.9.9.9</baseurl>
+      <repoid>HDP-2.9.9.9</repoid>
       <reponame>HDP</reponame>
     </repo>
     <repo>
-      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0</baseurl>
-      <repoid>HDP-2.2.0.0</repoid>
-      <reponame>HDP-2.2</reponame>
-    </repo>
-    <repo>
       <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.17/repos/centos5</baseurl>
       <repoid>HDP-UTILS-1.1.0.17</repoid>
       <reponame>HDP-UTILS</reponame>
@@ -52,16 +42,11 @@
   </os>
   <os type="suse11">
     <repo>
-      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/suse11/2.x/BUILDS/2.9.9.9-98</baseurl>
-      <repoid>HDP-2.9.9.9-98</repoid>
+      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/suse11/2.x/updates/2.9.9.9</baseurl>
+      <repoid>HDP-2.9.9.9</repoid>
       <reponame>HDP</reponame>
     </repo>
     <repo>
-      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/suse11/2.x/updates/2.2.0.0</baseurl>
-      <repoid>HDP-2.2.0.0</repoid>
-      <reponame>HDP-2.2</reponame>
-    </repo>
-    <repo>
       <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.17/repos/suse11</baseurl>
       <repoid>HDP-UTILS-1.1.0.17</repoid>
       <reponame>HDP-UTILS</reponame>

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.2/services/FALCON/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/FALCON/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/FALCON/metainfo.xml
index 4a46139..d77f7f8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/FALCON/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/FALCON/metainfo.xml
@@ -22,7 +22,17 @@
       <name>FALCON</name>
       <displayName>Falcon</displayName>
       <comment>Data management and processing platform</comment>
-      <version>0.6.0.2.2.0.0</version>
+      <version>0.6.0.2.2.9.9</version>
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>falcon_2_9_9_9_117</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
     </service>
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.2/services/FLUME/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/FLUME/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/FLUME/metainfo.xml
index 6b702c8..b70fa1d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/FLUME/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/FLUME/metainfo.xml
@@ -29,7 +29,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>flume_2_9_9_9_98</name>
+              <name>flume_2_9_9_9_117</name>
             </package>
           </packages>
         </osSpecific>

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml
index 52cd10d..232054f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml
@@ -31,7 +31,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>hbase_2_9_9_9_98</name>
+              <name>hbase_2_9_9_9_117</name>
             </package>
           </packages>
         </osSpecific>

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
index 3213506..460e946 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
@@ -23,7 +23,7 @@
 <configuration>
   <property>
     <name>rpm_version</name>
-    <value>2.9.9.9-98</value>
+    <value>2.9.9.9-117</value>
     <description>Hadoop RPM version</description>
   </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hdfs-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hdfs-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hdfs-site.xml
index 4f46cb7..af0064c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hdfs-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hdfs-site.xml
@@ -24,7 +24,7 @@
 
   <property>
     <name>dfs.hosts.exclude</name>
-    <value>/usr/hdp/2.9.9.9-98/etc/hadoop/conf/dfs.exclude</value>
+    <value>/usr/hdp/2.9.9.9-117/etc/hadoop/conf/dfs.exclude</value>
     <description>Names a file that contains a list of hosts that are
       not permitted to connect to the namenode.  The full pathname of the
       file must be specified.  If the value is empty, no hosts are

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml
index b520a34..91c5d63 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml
@@ -29,7 +29,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>hadoop_2_9_9_9_98</name>
+              <name>hadoop_2_9_9_9_117</name>
             </package>
             <package>
               <name>hadoop-lzo</name>
@@ -53,7 +53,7 @@
               <name>hadoop-lzo-native</name>
             </package>
             <package>
-              <name>hadoop_2_9_9_9_98-libhdfs</name>
+              <name>hadoop_2_9_9_9_117-libhdfs</name>
             </package>
             <package>
               <name>ambari-log4j</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
index 28567a7..7548296 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/metainfo.xml
@@ -22,6 +22,43 @@
       <name>HIVE</name>
       <comment>Data warehouse system for ad-hoc queries &amp; analysis of large datasets and table &amp; storage management service</comment>
       <version>0.14.0.2.9.9.9</version>
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>hive_2_9_9_9_117</name>
+            </package>
+            <package>
+              <name>mysql-connector-java</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>redhat5,redhat6,suse11</osFamily>
+          <packages>
+            <package>
+              <name>mysql</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>redhat5,redhat6,ubuntu12</osFamily>
+          <packages>
+            <package>
+              <name>mysql-server</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>suse11</osFamily>
+          <packages>
+            <package>
+              <name>mysql-client</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
     </service>
 
     <service>
@@ -33,7 +70,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>hive_2_9_9_9_98-hcatalog</name>
+              <name>hive_2_9_9_9_117-hcatalog</name>
             </package>
           </packages>
         </osSpecific>

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml
index 8e8be78..b725341 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/configuration/oozie-site.xml
@@ -21,7 +21,7 @@
 
   <property>
     <name>oozie.service.HadoopAccessorService.hadoop.configurations</name>
-    <value>*=/usr/hdp/2.9.9.9-98/etc/hadoop/conf</value>
+    <value>*=/usr/hdp/2.9.9.9-117/etc/hadoop/conf</value>
     <description>
       Comma separated AUTHORITY=HADOOP_CONF_DIR, where AUTHORITY is the HOST:PORT of
       the Hadoop service (JobTracker, HDFS). The wildcard '*' configuration is

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml
index 5c77061..5292879 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/OOZIE/metainfo.xml
@@ -22,7 +22,32 @@
       <name>OOZIE</name>
       <comment>System for workflow coordination and execution of Apache Hadoop jobs.  This also includes the installation of the optional Oozie Web Console which relies on and will install the &lt;a target="_blank" href="http://www.sencha.com/legal/open-source-faq/"&gt;ExtJS&lt;/a&gt; Library.
       </comment>
-      <version>4.1.0.2.2.0.0</version>
+      <version>4.1.0.2.2.9.9</version>
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>oozie_2_9_9_9_117</name>
+            </package>
+            <package>
+              <name>oozie_2_9_9_9_117-client</name>
+            </package>
+            <package>
+              <name>falcon_2_9_9_9_117</name>
+            </package>
+            <package>
+              <name>zip</name>
+            </package>
+            <package>
+              <name>mysql-connector-java</name>
+            </package>
+            <package>
+              <name>extjs</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
     </service>
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.2/services/PIG/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/PIG/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/PIG/metainfo.xml
index 335993f..555cb41 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/PIG/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/PIG/metainfo.xml
@@ -23,19 +23,16 @@
       <displayName>Pig</displayName>
       <comment>Scripting platform for analyzing large datasets</comment>
       <version>0.14.0.2.9.9.9</version>
-
       <osSpecifics>
         <osSpecific>
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>pig_2_9_9_9_98</name>
+              <name>pig_2_9_9_9_117</name>
             </package>
           </packages>
         </osSpecific>
       </osSpecifics>
-
-
     </service>
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.2/services/SQOOP/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/SQOOP/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/SQOOP/metainfo.xml
index f644d74..d3e1de7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/SQOOP/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/SQOOP/metainfo.xml
@@ -23,7 +23,20 @@
       <comment>Tool for transferring bulk data between Apache Hadoop and
         structured data stores such as relational databases
       </comment>
-      <version>1.4.5.2.2</version>
+      <version>1.4.5.2.9.9.9</version>
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>sqoop_2_9_9_9_117</name>
+            </package>
+            <package>
+              <name>mysql-connector-java</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
     </service>
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/metainfo.xml
index c25718d..d2bf9d6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/STORM/metainfo.xml
@@ -23,7 +23,17 @@
       <name>STORM</name>
       <displayName>Storm</displayName>
       <comment>Apache Hadoop Stream processing framework</comment>
-      <version>0.9.3.2.2.0.0</version>
+      <version>0.9.3.2.9.9.9</version>
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>storm_2_9_9_9_117</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
     </service>
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/metainfo.xml
index 25f579a..3c04a41 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/metainfo.xml
@@ -29,7 +29,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>tez_2_9_9_9_98</name>
+              <name>tez_2_9_9_9_117</name>
             </package>
           </packages>
         </osSpecific>

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/configuration/webhcat-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/configuration/webhcat-site.xml
index d14be36..ce9f2c5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/configuration/webhcat-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/configuration/webhcat-site.xml
@@ -25,33 +25,33 @@ limitations under the License.
 
   <property>
     <name>templeton.hadoop.conf.dir</name>
-    <value>/usr/hdp/2.9.9.9-98/etc/hadoop/conf</value>
+    <value>/usr/hdp/2.9.9.9-117/etc/hadoop/conf</value>
     <description>The path to the Hadoop configuration.</description>
   </property>
 
   <property>
     <name>templeton.jar</name>
-    <value>/usr/hdp/2.9.9.9-98/hcatalog/share/webhcat/svr/webhcat.jar</value>
+    <value>/usr/hdp/2.9.9.9-117/hcatalog/share/webhcat/svr/webhcat.jar</value>
     <description>The path to the Templeton jar file.</description>
   </property>
 
   <property>
     <name>templeton.libjars</name>
-    <value>/usr/hdp/2.9.9.9-98/zookeeper/zookeeper.jar</value>
+    <value>/usr/hdp/2.9.9.9-117/zookeeper/zookeeper.jar</value>
     <description>Jars to add the the classpath.</description>
   </property>
 
 
   <property>
     <name>templeton.hadoop</name>
-    <value>/usr/hdp/2.9.9.9-98/hadoop/bin/hadoop</value>
+    <value>/usr/hdp/2.9.9.9-117/hadoop/bin/hadoop</value>
     <description>The path to the Hadoop executable.</description>
   </property>
 
 
   <property>
     <name>templeton.hcat</name>
-    <value>/usr/hdp/2.9.9.9-98/hive/bin/hcat</value>
+    <value>/usr/hdp/2.9.9.9-117/hive/bin/hcat</value>
     <description>The path to the hcatalog executable.</description>
   </property>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/metainfo.xml
index a05f9e7..3a0a6a3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/WEBHCAT/metainfo.xml
@@ -27,7 +27,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>hive_2_9_9_9_98-webhcat</name>
+              <name>hive_2_9_9_9_117-webhcat</name>
             </package>
             <package>
               <name>webhcat-tar-hive</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml
index a831936..91c9e09 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml
@@ -24,7 +24,7 @@
 
   <property>
     <name>mapreduce.admin.user.env</name>
-    <value>LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/hdp/2.9.9.9-98/hadoop/lib/native/Linux-amd64-64</value>
+    <value>LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/hdp/2.9.9.9-117/hadoop/lib/native/Linux-amd64-64</value>
     <description>
       Additional execution environment entries for map and reduce task processes.
       This is not an additive property. You must preserve the original value if

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
index 065f57e..6bcf82e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
@@ -23,7 +23,7 @@
 
   <property>
     <name>yarn.resourcemanager.nodes.exclude-path</name>
-    <value>/usr/hdp/2.9.9.9-98/etc/hadoop/conf/yarn.exclude</value>
+    <value>/usr/hdp/2.9.9.9-117/etc/hadoop/conf/yarn.exclude</value>
     <description>
       Names a file that contains a list of hosts that are
       not permitted to connect to the resource manager.  The full pathname of the

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/metainfo.xml
index 7a30894..44fa72f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/metainfo.xml
@@ -38,10 +38,10 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>hadoop_2_9_9_9_98-yarn</name>
+              <name>hadoop_2_9_9_9_117-yarn</name>
             </package>
             <package>
-              <name>hadoop_2_9_9_9_98-mapreduce</name>
+              <name>hadoop_2_9_9_9_117-mapreduce</name>
             </package>
           </packages>
         </osSpecific>
@@ -58,7 +58,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>hadoop_2_9_9_9_98-mapreduce</name>
+              <name>hadoop_2_9_9_9_117-mapreduce</name>
             </package>
           </packages>
         </osSpecific>

http://git-wip-us.apache.org/repos/asf/ambari/blob/007288a2/ambari-server/src/main/resources/stacks/HDP/2.2/services/ZOOKEEPER/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/ZOOKEEPER/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/ZOOKEEPER/metainfo.xml
index 525faef..329aa50 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/ZOOKEEPER/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/ZOOKEEPER/metainfo.xml
@@ -29,7 +29,7 @@
           <osFamily>any</osFamily>
           <packages>
             <package>
-              <name>zookeeper_2_9_9_9_98</name>
+              <name>zookeeper_2_9_9_9_117</name>
             </package>
           </packages>
         </osSpecific>

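All of the metainfo.xml edits in this commit follow one mechanical rule: the build number embedded in the package name moves from 98 to 117, with the dots and dash of the version flattened to underscores. A hypothetical helper capturing that naming convention (not part of the commit, purely illustrative):

def versioned_package(base, version):
    # e.g. versioned_package("zookeeper", "2.9.9.9-117") -> "zookeeper_2_9_9_9_117"
    return "{0}_{1}".format(base, version.replace(".", "_").replace("-", "_"))

assert versioned_package("zookeeper", "2.9.9.9-117") == "zookeeper_2_9_9_9_117"
assert versioned_package("hadoop", "2.9.9.9-117") + "-yarn" == "hadoop_2_9_9_9_117-yarn"
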

[23/27] git commit: AMBARI-7190 - Views: attempting to access views during extraction throws exception (reopened)

Posted by jo...@apache.org.
AMBARI-7190 - Views: attempting to access views during extraction throws exception (reopened)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a412da83
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a412da83
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a412da83

Branch: refs/heads/branch-alerts-dev
Commit: a412da83c531ab4c6b0034c4dc087c5b58dff74c
Parents: 26cd2f4
Author: tbeerbower <tb...@hortonworks.com>
Authored: Mon Sep 15 17:58:19 2014 -0400
Committer: tbeerbower <tb...@hortonworks.com>
Committed: Mon Sep 15 20:34:54 2014 -0400

----------------------------------------------------------------------
 .../apache/ambari/server/view/ViewRegistry.java |  6 +++--
 .../ambari/server/view/ViewRegistryTest.java    | 23 ++++++++++++++++++++
 2 files changed, 27 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a412da83/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
index 17d77a6..8f6774e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
@@ -261,8 +261,10 @@ public class ViewRegistry {
   public ViewEntity getDefinition(ResourceTypeEntity resourceTypeEntity) {
 
     for (ViewEntity viewEntity : viewDefinitions.values()) {
-      if (viewEntity.getResourceType().equals(resourceTypeEntity)) {
-        return viewEntity;
+      if (viewEntity.isDeployed()) {
+        if (viewEntity.getResourceType().equals(resourceTypeEntity)) {
+          return viewEntity;
+        }
       }
     }
     return null;

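The fix is a guard rather than a behavioral rewrite: view definitions whose archive extraction has not completed are skipped during lookup, so concurrent callers get a "not found" result instead of an exception from a half-initialized entity. The same shape in a compact Python rendering of the Java logic (names illustrative):

def get_definition(view_definitions, resource_type):
    # Only consider views whose extraction has finished (isDeployed() in the
    # Java original); others are invisible to lookups until deployment ends.
    for view in view_definitions.values():
        if view.is_deployed() and view.resource_type == resource_type:
            return view
    return None
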
http://git-wip-us.apache.org/repos/asf/ambari/blob/a412da83/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java b/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
index 8e36dba..eba0c25 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
@@ -533,6 +533,29 @@ public class ViewRegistryTest {
   }
 
   @Test
+  public void testGetDefinition() throws Exception {
+    ViewEntity viewDefinition = ViewEntityTest.getViewEntity();
+
+    ViewRegistry registry = ViewRegistry.getInstance();
+
+    ResourceTypeEntity resourceTypeEntity = new ResourceTypeEntity();
+    resourceTypeEntity.setId(10);
+    resourceTypeEntity.setName(viewDefinition.getName());
+
+    viewDefinition.setResourceType(resourceTypeEntity);
+
+    registry.addDefinition(viewDefinition);
+
+    viewDefinition.setStatus(ViewDefinition.ViewStatus.DEPLOYING);
+
+    Assert.assertNull(registry.getDefinition(resourceTypeEntity));
+
+    viewDefinition.setStatus(ViewDefinition.ViewStatus.DEPLOYED);
+
+    Assert.assertEquals(viewDefinition, registry.getDefinition(resourceTypeEntity));
+  }
+
+  @Test
   public void testAddGetInstanceDefinitions() throws Exception {
     ViewEntity viewDefinition = ViewEntityTest.getViewEntity();
     ViewInstanceEntity viewInstanceDefinition = ViewInstanceEntityTest.getViewInstanceEntity();