Posted to commits@ambari.apache.org by nc...@apache.org on 2016/02/22 17:25:31 UTC

[1/3] ambari git commit: Revert "Merge with trunk"

Repository: ambari
Updated Branches:
  refs/heads/branch-dev-patch-upgrade dea22be16 -> 4d3839c7f


http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-web/test/views/main/dashboard/widgets/hawqsegment_live_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/dashboard/widgets/hawqsegment_live_test.js b/ambari-web/test/views/main/dashboard/widgets/hawqsegment_live_test.js
deleted file mode 100644
index eb8a505..0000000
--- a/ambari-web/test/views/main/dashboard/widgets/hawqsegment_live_test.js
+++ /dev/null
@@ -1,69 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-var App = require('app');
-
-require('views/main/dashboard/widget');
-require('views/main/dashboard/widgets/text_widget');
-require('views/main/dashboard/widgets/hawqsegment_live');
-
-describe('App.HawqSegmentUpView', function() {
-
-  var tests = [
-    {
-      data: 100,
-      e: {
-        isRed: false,
-        isOrange: false,
-        isGreen: true
-      }
-    },
-    {
-      data: 0,
-      e: {
-        isRed: true,
-        isOrange: false,
-        isGreen: false
-      }
-    },
-    {
-      data: 50,
-      e: {
-        isRed: false,
-        isOrange: true,
-        isGreen: false
-      }
-    }
-  ];
-
-  tests.forEach(function(test) {
-    describe('', function() {
-      var hawqSegmentUpView = App.HawqSegmentUpView.create({model_type:null, data: test.data, content: test.data.toString()});
-      it('shows red', function() {
-        expect(hawqSegmentUpView.get('isRed')).to.equal(test.e.isRed);
-      });
-      it('shows orange', function() {
-        expect(hawqSegmentUpView.get('isOrange')).to.equal(test.e.isOrange);
-      });
-      it('shows green', function() {
-        expect(hawqSegmentUpView.get('isGreen')).to.equal(test.e.isGreen);
-      });
-    });
-  });
-
-});


[3/3] ambari git commit: Revert "Merge with trunk"

Posted by nc...@apache.org.
Revert "Merge with trunk"

This reverts commit dea22be16c172ed0ca7a6e66ac29bda04027ca2f.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4d3839c7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4d3839c7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4d3839c7

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 4d3839c7f9ce921e92de82bc66440167cb6173a9
Parents: dea22be
Author: Nate Cole <nc...@hortonworks.com>
Authored: Mon Feb 22 11:24:56 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Mon Feb 22 11:24:56 2016 -0500

----------------------------------------------------------------------
 .../python/resource_management/core/logger.py   |   2 +-
 .../timeline/AbstractTimelineMetricsSink.java   |  53 +-----
 .../ApplicationHistoryServer.java               |   6 +-
 .../loadsimulator/net/RestMetricsSender.java    |   4 +-
 .../ApplicationHistoryStoreTestUtils.java       |   2 +-
 .../TestApplicationHistoryClientService.java    |   6 +-
 .../TestFileSystemApplicationHistoryStore.java  |  12 +-
 .../TestMemoryApplicationHistoryStore.java      |  12 +-
 .../webapp/TestAHSWebServices.java              |   2 +-
 .../server/configuration/Configuration.java     |  89 ---------
 .../server/controller/ControllerModule.java     |  16 --
 .../controller/ServiceComponentRequest.java     |  35 +---
 .../controller/ServiceComponentResponse.java    |  22 +--
 .../internal/ComponentResourceProvider.java     |  38 ----
 .../server/orm/dao/HostRoleCommandDAO.java      | 166 +++-------------
 .../orm/entities/HostRoleCommandEntity.java     |   7 +-
 .../ServiceComponentDesiredStateEntity.java     |  11 --
 .../serveraction/ServerActionExecutor.java      |  13 +-
 .../ambari/server/state/ServiceComponent.java   |  14 --
 .../server/state/ServiceComponentImpl.java      |  80 +-------
 .../server/state/cluster/ClusterImpl.java       |  36 ++--
 .../services/AlertNoticeDispatchService.java    |  17 +-
 .../server/upgrade/UpgradeCatalog222.java       |   5 -
 .../server/upgrade/UpgradeCatalog240.java       |  14 --
 .../main/resources/Ambari-DDL-Derby-CREATE.sql  |   2 -
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   3 -
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |   3 -
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |   5 -
 .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql     |   3 -
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |   3 -
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   |   5 +-
 .../src/main/resources/alert-templates.xml      |  20 +-
 .../0.8.1.2.2/configuration/kafka-broker.xml    |   1 +
 .../src/main/resources/properties.json          |   1 -
 .../main/resources/scripts/Ambaripreupload.py   |  41 ++--
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |   5 +-
 .../stacks/HDP/2.2/services/stack_advisor.py    |  17 +-
 .../stacks/HDP/2.3/services/stack_advisor.py    |  32 ++--
 .../stacks/HDP/2.4/services/HIVE/metainfo.xml   |  41 +---
 .../actionmanager/TestActionScheduler.java      |  24 +--
 .../ambari/server/agent/AgentResourceTest.java  |   2 -
 .../server/configuration/ConfigurationTest.java |  95 ----------
 .../AmbariManagementControllerTest.java         |   3 -
 .../server/controller/KerberosHelperTest.java   |   2 -
 .../internal/ComponentResourceProviderTest.java |  37 ++--
 .../ambari/server/stack/StackManagerTest.java   |  35 ++--
 .../ambari/server/state/ConfigHelperTest.java   |   2 -
 .../server/upgrade/UpgradeCatalog222Test.java   |  55 +-----
 .../server/upgrade/UpgradeCatalog240Test.java   |  12 --
 .../ambari/server/utils/StageUtilsTest.java     |   2 -
 .../stacks/2.0.6/common/test_stack_advisor.py   |  12 +-
 .../stacks/2.2/common/test_stack_advisor.py     |  14 +-
 .../stacks/2.3/common/test_stack_advisor.py     |  46 -----
 ambari-web/app/assets/test/tests.js             |   1 -
 .../hawq/addStandby/step3_controller.js         |   2 +-
 .../app/mappers/components_state_mapper.js      |   5 -
 ambari-web/app/messages.js                      |   5 -
 .../app/models/alerts/alert_definition.js       |   4 +-
 ambari-web/app/views.js                         |   1 -
 .../configs/widgets/list_config_widget_view.js  |  11 +-
 ambari-web/app/views/main/dashboard/widgets.js  |  19 +-
 .../main/dashboard/widgets/hawqsegment_live.js  | 190 -------------------
 .../dashboard/widgets/hawqsegment_live_test.js  |  69 -------
 63 files changed, 222 insertions(+), 1270 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-common/src/main/python/resource_management/core/logger.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/logger.py b/ambari-common/src/main/python/resource_management/core/logger.py
index 5bbd35b..fd05b02 100644
--- a/ambari-common/src/main/python/resource_management/core/logger.py
+++ b/ambari-common/src/main/python/resource_management/core/logger.py
@@ -173,4 +173,4 @@ class Logger:
     if arguments_str:
       arguments_str = arguments_str[:-2]
         
-    return unicode("{0} {{{1}}}", 'UTF-8').format(name, arguments_str)
\ No newline at end of file
+    return unicode("{0} {{{1}}}").format(name, arguments_str)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
index b2810b7..2854898 100644
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
+++ b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
@@ -28,12 +28,9 @@ import javax.net.ssl.HttpsURLConnection;
 import javax.net.ssl.SSLContext;
 import javax.net.ssl.SSLSocketFactory;
 import javax.net.ssl.TrustManagerFactory;
-import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.net.HttpURLConnection;
 import java.net.URL;
@@ -75,19 +72,17 @@ public abstract class AbstractTimelineMetricsSink {
   protected void emitMetrics(TimelineMetrics metrics) {
     String connectUrl = getCollectorUri();
     int timeout = getTimeoutSeconds() * 1000;
-    HttpURLConnection connection = null;
     try {
       if (connectUrl == null) {
         throw new IOException("Unknown URL. " +
           "Unable to connect to metrics collector.");
       }
       String jsonData = mapper.writeValueAsString(metrics);
-      connection = connectUrl.startsWith("https") ?
+      HttpURLConnection connection = connectUrl.startsWith("https") ?
         getSSLConnection(connectUrl) : getConnection(connectUrl);
 
       connection.setRequestMethod("POST");
       connection.setRequestProperty("Content-Type", "application/json");
-      connection.setRequestProperty("Connection", "Keep-Alive");
       connection.setConnectTimeout(timeout);
       connection.setReadTimeout(timeout);
       connection.setDoOutput(true);
@@ -108,52 +103,14 @@ public abstract class AbstractTimelineMetricsSink {
           LOG.debug("Metrics posted to Collector " + connectUrl);
         }
       }
-      cleanupInputStream(connection.getInputStream());
-    } catch (IOException ioe) {
-      StringBuilder errorMessage =
-        new StringBuilder("Unable to connect to collector, " + connectUrl + "\n");
-      try {
-        if ((connection != null)) {
-          errorMessage.append(cleanupInputStream(connection.getErrorStream()));
-        }
-      } catch (IOException e) {
-        //NOP
-      }
+    } catch (IOException e) {
       if (LOG.isDebugEnabled()) {
-        LOG.debug(errorMessage, ioe);
+        LOG.debug("Unable to connect to collector, " + connectUrl, e);
       } else {
-        LOG.info(errorMessage);
-      }
-      throw new UnableToConnectException(ioe).setConnectUrl(connectUrl);
-    }
-  }
-
-  /**
-   * Cleans up and closes an input stream
-   * see http://docs.oracle.com/javase/6/docs/technotes/guides/net/http-keepalive.html
-   * @param is the InputStream to clean up
-   * @return string read from the InputStream
-   * @throws IOException
-   */
-  private String cleanupInputStream(InputStream is) throws IOException {
-    StringBuilder sb = new StringBuilder();
-    if (is != null) {
-      try (
-        InputStreamReader isr = new InputStreamReader(is);
-        BufferedReader br = new BufferedReader(isr)
-      ) {
-        // read the response body
-        String line;
-        while ((line = br.readLine()) != null) {
-          if (LOG.isDebugEnabled()) {
-            sb.append(line);
-          }
-        }
-      } finally {
-        is.close();
+        LOG.info("Unable to connect to collector, " + connectUrl);
       }
+      throw new UnableToConnectException(e).setConnectUrl(connectUrl);
     }
-    return sb.toString();
   }
 
   // Get a connection
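
For context on the block removed above: cleanupInputStream() and the "Connection: Keep-Alive" request header are there so HttpURLConnection can return the underlying socket to its persistent-connection pool, which only happens once the response (or error) body has been fully read and closed (see the Oracle keep-alive note cited in the removed javadoc). A minimal, standalone sketch of that drain pattern; the collector URL below is illustrative only, not taken from the Ambari code:

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class DrainSketch {
      // Read and close a stream so the socket can go back to the keep-alive pool.
      static void drain(InputStream is) throws IOException {
        if (is == null) {
          return;
        }
        try (BufferedReader br = new BufferedReader(new InputStreamReader(is))) {
          while (br.readLine() != null) {
            // discard; we only care that the body is consumed
          }
        }
      }

      public static void main(String[] args) throws IOException {
        URL url = new URL("http://localhost:6188/ws/v1/timeline/metrics"); // illustrative endpoint
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty("Connection", "Keep-Alive");
        try {
          drain(conn.getInputStream());
        } catch (IOException e) {
          drain(conn.getErrorStream()); // the error body must be drained as well
        }
      }
    }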

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
index 1ca9c33..62a8cc3 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
@@ -55,8 +55,8 @@ import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.ti
 public class ApplicationHistoryServer extends CompositeService {
 
   public static final int SHUTDOWN_HOOK_PRIORITY = 30;
-  private static final Log LOG =
-    LogFactory.getLog(ApplicationHistoryServer.class);
+  private static final Log LOG = LogFactory
+    .getLog(ApplicationHistoryServer.class);
 
   ApplicationHistoryClientService ahsClientService;
   ApplicationHistoryManager historyManager;
@@ -172,8 +172,6 @@ public class ApplicationHistoryServer extends CompositeService {
     LOG.info("Instantiating AHSWebApp at " + bindAddress);
     try {
       Configuration conf = metricConfiguration.getMetricsConf();
-      conf.set("hadoop.http.max.threads", String.valueOf(metricConfiguration
-        .getTimelineMetricsServiceHandlerThreadCount()));
       HttpConfig.Policy policy = HttpConfig.Policy.valueOf(
         conf.get(TimelineMetricConfiguration.TIMELINE_SERVICE_HTTP_POLICY,
           HttpConfig.Policy.HTTP_ONLY.name()));

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
index 32af851..0a9a513 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
@@ -24,7 +24,6 @@ import org.slf4j.LoggerFactory;
 import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.ProtocolException;
-import java.util.concurrent.TimeUnit;
 
 /**
  * Implements MetricsSender and provides a way of pushing metrics to application metrics history service using REST
@@ -66,8 +65,7 @@ public class RestMetricsSender implements MetricsSender {
       responseString = svc.send(payload);
 
       timer.stop();
-      LOG.info("http response time: " + timer.elapsed(TimeUnit.MILLISECONDS)
-        + " ms");
+      LOG.info("http response time: " + timer.elapsedMillis() + " ms");
 
       if (responseString.length() > 0) {
         LOG.debug("POST response from server: " + responseString);

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
index ec9b49d..c41b8a7 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
@@ -58,7 +58,7 @@ public class ApplicationHistoryStoreTestUtils {
       ApplicationAttemptId appAttemptId) throws IOException {
     store.applicationAttemptStarted(ApplicationAttemptStartData.newInstance(
       appAttemptId, appAttemptId.toString(), 0,
-      ContainerId.newContainerId(appAttemptId, 1)));
+      ContainerId.newInstance(appAttemptId, 1)));
   }
 
   protected void writeApplicationAttemptFinishData(

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
index f93ac5e..2fdedb2 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
@@ -168,7 +168,7 @@ public class TestApplicationHistoryClientService extends
     writeApplicationStartData(appId);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
+    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
     writeContainerStartData(containerId);
     writeContainerFinishData(containerId);
     writeApplicationFinishData(appId);
@@ -189,8 +189,8 @@ public class TestApplicationHistoryClientService extends
     writeApplicationStartData(appId);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
-    ContainerId containerId1 = ContainerId.newContainerId(appAttemptId, 2);
+    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
+    ContainerId containerId1 = ContainerId.newInstance(appAttemptId, 2);
     writeContainerStartData(containerId);
     writeContainerFinishData(containerId);
     writeContainerStartData(containerId1);
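
For context on the ContainerId changes that repeat through these test diffs: the merge had moved to the newer ContainerId.newContainerId(attemptId, id) factory, and the revert restores the older ContainerId.newInstance(attemptId, id). A minimal YARN sketch showing the two factories side by side; the timestamps and ids are arbitrary:

    import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
    import org.apache.hadoop.yarn.api.records.ApplicationId;
    import org.apache.hadoop.yarn.api.records.ContainerId;

    public class ContainerIdSketch {
      public static void main(String[] args) {
        ApplicationId appId = ApplicationId.newInstance(0L, 1);
        ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);

        // Older factory, restored by this revert:
        ContainerId viaNewInstance = ContainerId.newInstance(attemptId, 1);
        System.out.println(viaNewInstance);

        // Newer factory the merge had introduced; it only exists on Hadoop versions
        // that ship ContainerId.newContainerId, so it is left commented out here:
        // ContainerId viaNewContainerId = ContainerId.newContainerId(attemptId, 1L);
      }
    }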

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
index 543c25b..bc16d36 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
@@ -94,7 +94,7 @@ public class TestFileSystemApplicationHistoryStore extends
         }
         // write container history data
         for (int k = 1; k <= num; ++k) {
-          ContainerId containerId = ContainerId.newContainerId(appAttemptId, k);
+          ContainerId containerId = ContainerId.newInstance(appAttemptId, k);
           writeContainerStartData(containerId);
           if (missingContainer && k == num) {
             continue;
@@ -144,7 +144,7 @@ public class TestFileSystemApplicationHistoryStore extends
         // read container history data
         Assert.assertEquals(num, store.getContainers(appAttemptId).size());
         for (int k = 1; k <= num; ++k) {
-          ContainerId containerId = ContainerId.newContainerId(appAttemptId, k);
+          ContainerId containerId = ContainerId.newInstance(appAttemptId, k);
           ContainerHistoryData containerData = store.getContainer(containerId);
           Assert.assertNotNull(containerData);
           Assert.assertEquals(Priority.newInstance(containerId.getId()),
@@ -159,7 +159,7 @@ public class TestFileSystemApplicationHistoryStore extends
         ContainerHistoryData masterContainer =
             store.getAMContainer(appAttemptId);
         Assert.assertNotNull(masterContainer);
-        Assert.assertEquals(ContainerId.newContainerId(appAttemptId, 1),
+        Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1),
           masterContainer.getContainerId());
       }
     }
@@ -186,7 +186,7 @@ public class TestFileSystemApplicationHistoryStore extends
       Assert.assertTrue(e.getMessage().contains("is not opened"));
     }
     // write container history data
-    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
+    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
     try {
       writeContainerStartData(containerId);
       Assert.fail();
@@ -209,8 +209,8 @@ public class TestFileSystemApplicationHistoryStore extends
     writeApplicationStartData(appId);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    for (int i = 1; i <= 1000; ++i) {
-      ContainerId containerId = ContainerId.newContainerId(appAttemptId, i);
+    for (int i = 1; i <= 100000; ++i) {
+      ContainerId containerId = ContainerId.newInstance(appAttemptId, i);
       writeContainerStartData(containerId);
       writeContainerFinishData(containerId);
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
index b4da01a..fc5c096 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
@@ -137,7 +137,7 @@ public class TestMemoryApplicationHistoryStore extends
     ApplicationId appId = ApplicationId.newInstance(0, 1);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
+    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
     try {
       writeContainerFinishData(containerId);
       Assert.fail();
@@ -149,14 +149,14 @@ public class TestMemoryApplicationHistoryStore extends
     writeApplicationAttemptStartData(appAttemptId);
     int numContainers = 5;
     for (int i = 1; i <= numContainers; ++i) {
-      containerId = ContainerId.newContainerId(appAttemptId, i);
+      containerId = ContainerId.newInstance(appAttemptId, i);
       writeContainerStartData(containerId);
       writeContainerFinishData(containerId);
     }
     Assert
       .assertEquals(numContainers, store.getContainers(appAttemptId).size());
     for (int i = 1; i <= numContainers; ++i) {
-      containerId = ContainerId.newContainerId(appAttemptId, i);
+      containerId = ContainerId.newInstance(appAttemptId, i);
       ContainerHistoryData data = store.getContainer(containerId);
       Assert.assertNotNull(data);
       Assert.assertEquals(Priority.newInstance(containerId.getId()),
@@ -165,11 +165,11 @@ public class TestMemoryApplicationHistoryStore extends
     }
     ContainerHistoryData masterContainer = store.getAMContainer(appAttemptId);
     Assert.assertNotNull(masterContainer);
-    Assert.assertEquals(ContainerId.newContainerId(appAttemptId, 1),
+    Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1),
       masterContainer.getContainerId());
     writeApplicationAttemptFinishData(appAttemptId);
     // Write again
-    containerId = ContainerId.newContainerId(appAttemptId, 1);
+    containerId = ContainerId.newInstance(appAttemptId, 1);
     try {
       writeContainerStartData(containerId);
       Assert.fail();
@@ -195,7 +195,7 @@ public class TestMemoryApplicationHistoryStore extends
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
     for (int i = 1; i <= numContainers; ++i) {
-      ContainerId containerId = ContainerId.newContainerId(appAttemptId, i);
+      ContainerId containerId = ContainerId.newInstance(appAttemptId, i);
       writeContainerStartData(containerId);
       writeContainerFinishData(containerId);
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
index 44b3f65..e78dfcc 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
@@ -269,7 +269,7 @@ public class TestAHSWebServices extends JerseyTest {
     ApplicationId appId = ApplicationId.newInstance(0, 1);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
+    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
     WebResource r = resource();
     ClientResponse response =
         r.path("ws").path("v1").path("applicationhistory").path("apps")

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index 221b83d..eee4b61 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -184,9 +184,6 @@ public class Configuration {
   public static final String LDAP_REFERRAL_KEY = "authentication.ldap.referral";
   public static final String LDAP_PAGINATION_ENABLED_KEY = "authentication.ldap.pagination.enabled";
   public static final String SERVER_EC_CACHE_SIZE = "server.ecCacheSize";
-  public static final String SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED = "server.hrcStatusSummary.cache.enabled";
-  public static final String SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE = "server.hrcStatusSummary.cache.size";
-  public static final String SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION = "server.hrcStatusSummary.cache.expiryDuration";
   public static final String SERVER_STALE_CONFIG_CACHE_ENABLED_KEY = "server.cache.isStale.enabled";
   public static final String SERVER_PERSISTENCE_TYPE_KEY = "server.persistence.type";
   public static final String SERVER_JDBC_USER_NAME_KEY = "server.jdbc.user.name";
@@ -281,9 +278,6 @@ public class Configuration {
   public static final String TEMPORARY_KEYSTORE_ACTIVELY_PURGE = "security.temporary.keystore.actibely.purge";
   public static final boolean TEMPORARY_KEYSTORE_ACTIVELY_PURGE_DEFAULT = true;
 
-  // Alerts notifications properties
-  public static final String AMBARI_DISPLAY_URL = "ambari.display.url";
-
   /**
    * Key for repo validation suffixes.
    */
@@ -370,11 +364,6 @@ public class Configuration {
 
   public static final String CUSTOM_ACTION_DEFINITION_KEY = "custom.action.definitions";
   public static final String SHARED_RESOURCES_DIR_KEY = "shared.resources.dir";
-
-  protected static final boolean SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED_DEFAULT = true;
-  protected static final long SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE_DEFAULT = 10000L;
-  protected static final long SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_DEFAULT = 30; //minutes
-
   private static final String CUSTOM_ACTION_DEFINITION_DEF_VALUE = "/var/lib/ambari-server/resources/custom_action_definitions";
 
   private static final long SERVER_EC_CACHE_SIZE_DEFAULT = 10000L;
@@ -1785,75 +1774,6 @@ public class Configuration {
   }
 
   /**
-   * Caching of host role command status summary can be enabled/disabled
-   * through the {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED} config property.
-   * This method returns the value of {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED}
-   * config property. If this config property is not defined than returns the default defined by {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED_DEFAULT}.
-   * @return true if caching is to be enabled otherwise false.
-   */
-  public boolean getHostRoleCommandStatusSummaryCacheEnabled() {
-    String stringValue = properties.getProperty(SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED);
-    boolean value = SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED_DEFAULT;
-    if (stringValue != null) {
-      try {
-        value = Boolean.valueOf(stringValue);
-      }
-      catch (NumberFormatException ignored) {
-      }
-
-    }
-
-    return value;
-  }
-
-  /**
-   * In order to avoid the cache storing host role command status summary objects exhaust
-   * memory we set a max record number allowed for the cache. This limit can be configured
-   * through {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE} config property. The method returns
-   * the value of this config property. If this config property is not defined than
-   * the default value specified by {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE_DEFAULT} is returned.
-   * @return the upper limit for the number of cached host role command summaries.
-   */
-  public long getHostRoleCommandStatusSummaryCacheSize() {
-    String stringValue = properties.getProperty(SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE);
-    long value = SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE_DEFAULT;
-    if (stringValue != null) {
-      try {
-        value = Long.valueOf(stringValue);
-      }
-      catch (NumberFormatException ignored) {
-      }
-
-    }
-
-    return value;
-  }
-
-  /**
-   * As a safety measure the cache storing host role command status summaries should auto expire after a while.
-   * The expiry duration is specified through the {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION} config property
-   * expressed in minutes. The method returns the value of this config property. If this config property is not defined than
-   * the default value specified by {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_DEFAULT}
-   * @return the cache expiry duration in minutes
-   */
-  public long getHostRoleCommandStatusSummaryCacheExpiryDuration() {
-    String stringValue = properties.getProperty(SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION);
-    long value = SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_DEFAULT;
-    if (stringValue != null) {
-      try {
-        value = Long.valueOf(stringValue);
-      }
-      catch (NumberFormatException ignored) {
-      }
-
-    }
-
-    return value;
-  }
-
-
-
-  /**
    * @return whether staleConfig's flag is cached.
    */
   public boolean isStaleConfigCacheEnabled() {
@@ -2581,15 +2501,6 @@ public class Configuration {
   }
 
   /**
-   * Get the ambari display URL
-   * @return
-   */
-  public String getAmbariDisplayUrl() {
-    return properties.getProperty(AMBARI_DISPLAY_URL, null);
-  }
-
-
-  /**
    * @return number of retry attempts for api and blueprint operations
    */
   public int getOperationsRetryAttempts() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
index daca64d..76ff6db 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
@@ -65,7 +65,6 @@ import org.apache.ambari.server.notifications.NotificationDispatcher;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.DBAccessorImpl;
 import org.apache.ambari.server.orm.PersistenceType;
-import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.scheduler.ExecutionScheduler;
 import org.apache.ambari.server.scheduler.ExecutionSchedulerImpl;
 import org.apache.ambari.server.security.AmbariEntryPoint;
@@ -339,21 +338,6 @@ public class ControllerModule extends AbstractModule {
     bindConstant().annotatedWith(Names.named("executionCommandCacheSize")).
         to(configuration.getExecutionCommandsCacheSize());
 
-
-    // Host role commands status summary max cache enable/disable
-    bindConstant().annotatedWith(Names.named(HostRoleCommandDAO.HRC_STATUS_SUMMARY_CACHE_ENABLED)).
-      to(configuration.getHostRoleCommandStatusSummaryCacheEnabled());
-
-    // Host role commands status summary max cache size
-    bindConstant().annotatedWith(Names.named(HostRoleCommandDAO.HRC_STATUS_SUMMARY_CACHE_SIZE)).
-      to(configuration.getHostRoleCommandStatusSummaryCacheSize());
-    // Host role command status summary cache expiry duration in minutes
-    bindConstant().annotatedWith(Names.named(HostRoleCommandDAO.HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_MINUTES)).
-      to(configuration.getHostRoleCommandStatusSummaryCacheExpiryDuration());
-
-
-
-
     bind(AmbariManagementController.class).to(
       AmbariManagementControllerImpl.class);
     bind(AbstractRootServiceResponseFactory.class).to(RootServiceResponseFactory.class);

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentRequest.java
index ba0b84f..78b9897 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentRequest.java
@@ -31,28 +31,21 @@ public class ServiceComponentRequest {
 
   private String componentCategory;
 
-  private String recoveryEnabled; // CREATE/UPDATE
-
   public ServiceComponentRequest(String clusterName, String serviceName,
                                  String componentName, String desiredState) {
-    this(clusterName, serviceName, componentName, desiredState, null, null);
-  }
-
-  public ServiceComponentRequest(String clusterName, String serviceName,
-                                 String componentName, String desiredState,
-                                 String recoveryEnabled) {
-    this(clusterName, serviceName, componentName, desiredState, recoveryEnabled, null);
+    this.clusterName = clusterName;
+    this.serviceName = serviceName;
+    this.componentName = componentName;
+    this.desiredState = desiredState;
   }
 
   public ServiceComponentRequest(String clusterName,
                                  String serviceName, String componentName,
-                                 String desiredState, String recoveryEnabled,
-                                 String componentCategory) {
+                                 String desiredState, String componentCategory) {
     this.clusterName = clusterName;
     this.serviceName = serviceName;
     this.componentName = componentName;
     this.desiredState = desiredState;
-    this.recoveryEnabled = recoveryEnabled;
     this.componentCategory = componentCategory;
   }
 
@@ -112,20 +105,6 @@ public class ServiceComponentRequest {
     this.clusterName = clusterName;
   }
 
-  /**
-   * @return recoveryEnabled
-   */
-  public String getRecoveryEnabled() {
-    return recoveryEnabled;
-  }
-
-  /**
-   * @param recoveryEnabled the recoveryEnabled value to set.
-   */
-  public void setRecoveryEnabled(String recoveryEnabled) {
-    this.recoveryEnabled = recoveryEnabled;
-  }
-
   public String getComponentCategory() {
     return componentCategory;
   }
@@ -136,7 +115,7 @@ public class ServiceComponentRequest {
 
   @Override
   public String toString() {
-    return String.format("[clusterName=%s, serviceName=%s, componentName=%s, desiredState=%s, recoveryEnabled=%s, componentCategory=%s]",
-        clusterName, serviceName, clusterName, desiredState, recoveryEnabled, componentCategory);
+    return String.format("[clusterName=%s, serviceName=%s, componentName=%s, desiredState=%s, componentCategory=%s]",
+        clusterName, serviceName, clusterName, desiredState, componentCategory);
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentResponse.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentResponse.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentResponse.java
index 381b114..f7dd301 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentResponse.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentResponse.java
@@ -41,8 +41,6 @@ public class ServiceComponentResponse {
 
   private int installedCount;
 
-  private boolean recoveryEnabled;
-
   public ServiceComponentResponse(Long clusterId, String clusterName,
                                   String serviceName,
                                   String componentName,
@@ -50,8 +48,7 @@ public class ServiceComponentResponse {
                                   String desiredState,
                                   int totalCount,
                                   int startedCount,
-                                  int installedCount,
-                                  boolean recoveryEnabled) {
+                                  int installedCount) {
     super();
     this.clusterId = clusterId;
     this.clusterName = clusterName;
@@ -62,7 +59,6 @@ public class ServiceComponentResponse {
     this.totalCount = totalCount;
     this.startedCount = startedCount;
     this.installedCount = installedCount;
-    this.recoveryEnabled = recoveryEnabled;
   }
 
   /**
@@ -215,22 +211,6 @@ public class ServiceComponentResponse {
     this.totalCount = totalCount;
   }
 
-  /**
-   * Get a true or false value indicating if the service component is auto start enabled
-   * @return true or false
-   */
-  public boolean isRecoveryEnabled() {
-    return recoveryEnabled;
-  }
-
-  /**
-   * Set a true or false value indicating whether the service component is auto start enabled
-   * @param recoveryEnabled
-   */
-  public void setRecoveryEnabled(boolean recoveryEnabled) {
-    this.recoveryEnabled = recoveryEnabled;
-  }
-
   @Override
   public boolean equals(Object o) {
     if (this == o) return true;

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
index b339adf..3ad6e64 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
@@ -84,7 +84,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
   protected static final String COMPONENT_TOTAL_COUNT_PROPERTY_ID     = "ServiceComponentInfo/total_count";
   protected static final String COMPONENT_STARTED_COUNT_PROPERTY_ID   = "ServiceComponentInfo/started_count";
   protected static final String COMPONENT_INSTALLED_COUNT_PROPERTY_ID = "ServiceComponentInfo/installed_count";
-  protected static final String COMPONENT_RECOVERY_ENABLED_ID         = "ServiceComponentInfo/recovery_enabled";
 
   private static final String TRUE = "true";
 
@@ -179,7 +178,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
       setResourceProperty(resource, COMPONENT_TOTAL_COUNT_PROPERTY_ID, response.getTotalCount(), requestedIds);
       setResourceProperty(resource, COMPONENT_STARTED_COUNT_PROPERTY_ID, response.getStartedCount(), requestedIds);
       setResourceProperty(resource, COMPONENT_INSTALLED_COUNT_PROPERTY_ID, response.getInstalledCount(), requestedIds);
-      setResourceProperty(resource, COMPONENT_RECOVERY_ENABLED_ID, String.valueOf(response.isRecoveryEnabled()), requestedIds);
 
       resources.add(resource);
     }
@@ -253,7 +251,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
         (String) properties.get(COMPONENT_SERVICE_NAME_PROPERTY_ID),
         (String) properties.get(COMPONENT_COMPONENT_NAME_PROPERTY_ID),
         (String) properties.get(COMPONENT_STATE_PROPERTY_ID),
-        (String) properties.get(COMPONENT_RECOVERY_ENABLED_ID),
         (String) properties.get(COMPONENT_CATEGORY_PROPERTY_ID));
   }
 
@@ -466,9 +463,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
     Map<String, Map<String, Set<String>>> componentNames = new HashMap<>();
     Set<State> seenNewStates = new HashSet<>();
 
-    Collection<ServiceComponent> recoveryEnabledComponents = new ArrayList<>();
-    Collection<ServiceComponent> recoveryDisabledComponents = new ArrayList<>();
-
     // Determine operation level
     Resource.Type reqOpLvl;
     if (requestProperties.containsKey(RequestOperationLevel.OPERATION_LEVEL_ID)) {
@@ -519,20 +513,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
         continue;
       }
 
-      // Gather the components affected by the change in
-      // auto start state
-      if (!StringUtils.isEmpty(request.getRecoveryEnabled())) {
-        boolean newRecoveryEnabled = Boolean.parseBoolean(request.getRecoveryEnabled());
-        boolean oldRecoveryEnabled = sc.isRecoveryEnabled();
-        if (newRecoveryEnabled != oldRecoveryEnabled) {
-          if (newRecoveryEnabled) {
-            recoveryEnabledComponents.add(sc);
-          } else {
-            recoveryDisabledComponents.add(sc);
-          }
-        }
-      }
-
       if (newState == null) {
         debug("Nothing to do for new updateServiceComponent request, request ={}, newDesiredState=null" + request);
         continue;
@@ -559,11 +539,9 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
               + ", clusterId=" + cluster.getClusterId()
               + ", serviceName=" + sc.getServiceName()
               + ", componentName=" + sc.getName()
-              + ", recoveryEnabled=" + sc.isRecoveryEnabled()
               + ", currentDesiredState=" + oldScState
               + ", newDesiredState=" + newState);
         }
-
         if (!changedComps.containsKey(newState)) {
           changedComps.put(newState, new ArrayList<ServiceComponent>());
         }
@@ -571,7 +549,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
               + ", clusterName=" + clusterName
               + ", serviceName=" + serviceName
               + ", componentName=" + sc.getName()
-              + ", recoveryEnabled=" + sc.isRecoveryEnabled()
               + ", currentDesiredState=" + oldScState
               + ", newDesiredState=" + newState);
 
@@ -585,7 +562,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
                 + ", clusterName=" + clusterName
                 + ", serviceName=" + serviceName
                 + ", componentName=" + sc.getName()
-                + ", recoveryEnabled=" + sc.isRecoveryEnabled()
                 + ", hostname=" + sch.getHostName()
                 + ", currentState=" + oldSchState
                 + ", newDesiredState=" + newState);
@@ -598,7 +574,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
                 + ", clusterName=" + clusterName
                 + ", serviceName=" + serviceName
                 + ", componentName=" + sc.getName()
-                + ", recoveryEnabled=" + sc.isRecoveryEnabled()
                 + ", hostname=" + sch.getHostName()
                 + ", currentState=" + oldSchState
                 + ", newDesiredState=" + newState);
@@ -612,7 +587,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
                 + ", clusterName=" + clusterName
                 + ", serviceName=" + serviceName
                 + ", componentName=" + sc.getName()
-                + ", recoveryEnabled=" + sc.isRecoveryEnabled()
                 + ", hostname=" + sch.getHostName());
 
           continue;
@@ -626,7 +600,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
               + ", clusterId=" + cluster.getClusterId()
               + ", serviceName=" + sch.getServiceName()
               + ", componentName=" + sch.getServiceComponentName()
-              + ", recoveryEnabled=" + sc.isRecoveryEnabled()
               + ", hostname=" + sch.getHostName()
               + ", currentState=" + oldSchState
               + ", newDesiredState=" + newState);
@@ -642,7 +615,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
               + ", clusterName=" + clusterName
               + ", serviceName=" + serviceName
               + ", componentName=" + sc.getName()
-              + ", recoveryEnabled=" + sc.isRecoveryEnabled()
               + ", hostname=" + sch.getHostName()
               + ", currentState=" + oldSchState
               + ", newDesiredState=" + newState);
@@ -656,16 +628,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
 
     // TODO additional validation?
 
-    // Validations completed. Update the affected service components now.
-
-    for (ServiceComponent sc : recoveryEnabledComponents) {
-      sc.setRecoveryEnabled(true);
-    }
-
-    for (ServiceComponent sc : recoveryDisabledComponents) {
-      sc.setRecoveryEnabled(false);
-    }
-
     Cluster cluster = clusters.getCluster(clusterNames.iterator().next());
 
     return getManagementController().createAndPersistStages(cluster, requestProperties, null, null, changedComps, changedScHosts,

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java
index deca9b1..4fd03e5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java
@@ -18,6 +18,9 @@
 
 package org.apache.ambari.server.orm.dao;
 
+import static org.apache.ambari.server.orm.DBAccessor.DbType.ORACLE;
+import static org.apache.ambari.server.orm.dao.DaoUtils.ORACLE_LIST_LIMIT;
+
 import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -25,7 +28,6 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.concurrent.TimeUnit;
 
 import javax.persistence.EntityManager;
 import javax.persistence.TypedQuery;
@@ -47,27 +49,16 @@ import org.apache.ambari.server.orm.entities.HostEntity;
 import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
 import org.apache.ambari.server.orm.entities.HostRoleCommandEntity_;
 import org.apache.ambari.server.orm.entities.StageEntity;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.LoadingCache;
 import com.google.common.collect.Lists;
 import com.google.inject.Inject;
 import com.google.inject.Provider;
 import com.google.inject.Singleton;
-import com.google.inject.name.Named;
 import com.google.inject.persist.Transactional;
 
-import static org.apache.ambari.server.orm.DBAccessor.DbType.ORACLE;
-import static org.apache.ambari.server.orm.dao.DaoUtils.ORACLE_LIST_LIMIT;
-
 @Singleton
 public class HostRoleCommandDAO {
 
-  private static final Logger LOG = LoggerFactory.getLogger(HostRoleCommandDAO.class);
-
   private static final String SUMMARY_DTO = String.format(
     "SELECT NEW %s(" +
       "MAX(hrc.stage.skippable), " +
@@ -101,122 +92,12 @@ public class HostRoleCommandDAO {
    */
   private static final String COMPLETED_REQUESTS_SQL = "SELECT DISTINCT task.requestId FROM HostRoleCommandEntity task WHERE task.requestId NOT IN (SELECT task.requestId FROM HostRoleCommandEntity task WHERE task.status IN :notCompletedStatuses) ORDER BY task.requestId {0}";
 
-  /**
-   * A cache that holds {@link HostRoleCommandStatusSummaryDTO} grouped by stage id for requests by request id.
-   * The JPQL computing the host role command status summary for a request is rather expensive
-   * thus this cache helps reducing the load on the database
-   */
-  private final LoadingCache<Long, Map<Long, HostRoleCommandStatusSummaryDTO>> hrcStatusSummaryCache;
-
-  /**
-   * Specifies whether caching for {@link HostRoleCommandStatusSummaryDTO} grouped by stage id for requests
-   * is enabled.
-   */
-  private final boolean hostRoleCommandStatusSummaryCacheEnabled;
-
-
   @Inject
   Provider<EntityManager> entityManagerProvider;
 
   @Inject
   DaoUtils daoUtils;
 
-  public final static String HRC_STATUS_SUMMARY_CACHE_SIZE =  "hostRoleCommandStatusSummaryCacheSize";
-  public final static String HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_MINUTES = "hostRoleCommandStatusCacheExpiryDurationMins";
-  public final static String HRC_STATUS_SUMMARY_CACHE_ENABLED =  "hostRoleCommandStatusSummaryCacheEnabled";
-
-  /**
-   * Invalidates the host role command status summary cache entry that corresponds to the given request.
-   * @param requestId the key of the cache entry to be invalidated.
-   */
-  protected void invalidateHostRoleCommandStatusSummaryCache(Long requestId) {
-    if (!hostRoleCommandStatusSummaryCacheEnabled )
-      return;
-
-    LOG.debug("Invalidating host role command status summary cache for request {} !", requestId);
-    hrcStatusSummaryCache.invalidate(requestId);
-
-  }
-
-  /**
-   * Invalidates those entries in host role command status cache which are dependent on the passed {@link org.apache.ambari.server.orm.entities.HostRoleCommandEntity}
-   * entity.
-   * @param hostRoleCommandEntity
-   */
-  protected void invalidateHostRoleCommandStatusCache(HostRoleCommandEntity hostRoleCommandEntity) {
-    if ( !hostRoleCommandStatusSummaryCacheEnabled )
-      return;
-
-    if (hostRoleCommandEntity != null) {
-      Long requestId = hostRoleCommandEntity.getRequestId();
-      if (requestId == null) {
-        StageEntity stageEntity = hostRoleCommandEntity.getStage();
-        if (stageEntity != null)
-          requestId = stageEntity.getRequestId();
-      }
-
-      if (requestId != null)
-        invalidateHostRoleCommandStatusSummaryCache(requestId.longValue());
-    }
-
-  }
-
-  /**
-   * Loads the counts of tasks for a request and groups them by stage id.
-   * This allows for very efficient loading when there are a huge number of stages
-   * and tasks to iterate (for example, during a Stack Upgrade).
-   * @param requestId the request id
-   * @return the map of stage-to-summary objects
-   */
-  @RequiresSession
-  protected Map<Long, HostRoleCommandStatusSummaryDTO> loadAggregateCounts(Long requestId) {
-
-    TypedQuery<HostRoleCommandStatusSummaryDTO> query = entityManagerProvider.get().createQuery(
-      SUMMARY_DTO, HostRoleCommandStatusSummaryDTO.class);
-
-    query.setParameter("requestId", requestId);
-    query.setParameter("aborted", HostRoleStatus.ABORTED);
-    query.setParameter("completed", HostRoleStatus.COMPLETED);
-    query.setParameter("failed", HostRoleStatus.FAILED);
-    query.setParameter("holding", HostRoleStatus.HOLDING);
-    query.setParameter("holding_failed", HostRoleStatus.HOLDING_FAILED);
-    query.setParameter("holding_timedout", HostRoleStatus.HOLDING_TIMEDOUT);
-    query.setParameter("in_progress", HostRoleStatus.IN_PROGRESS);
-    query.setParameter("pending", HostRoleStatus.PENDING);
-    query.setParameter("queued", HostRoleStatus.QUEUED);
-    query.setParameter("timedout", HostRoleStatus.TIMEDOUT);
-    query.setParameter("skipped_failed", HostRoleStatus.SKIPPED_FAILED);
-
-    Map<Long, HostRoleCommandStatusSummaryDTO> map = new HashMap<Long, HostRoleCommandStatusSummaryDTO>();
-
-    for (HostRoleCommandStatusSummaryDTO dto : daoUtils.selectList(query)) {
-      map.put(dto.getStageId(), dto);
-    }
-
-    return map;
-  }
-
-  @Inject
-  public HostRoleCommandDAO(@Named(HRC_STATUS_SUMMARY_CACHE_ENABLED) boolean hostRoleCommandStatusSummaryCacheEnabled, @Named(HRC_STATUS_SUMMARY_CACHE_SIZE) long hostRoleCommandStatusSummaryCacheLimit, @Named(HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_MINUTES) long hostRoleCommandStatusSummaryCacheExpiryDurationMins) {
-    this.hostRoleCommandStatusSummaryCacheEnabled = hostRoleCommandStatusSummaryCacheEnabled;
-
-    LOG.info("Host role command status summary cache {} !", hostRoleCommandStatusSummaryCacheEnabled ? "enabled" : "disabled");
-
-
-    hrcStatusSummaryCache = CacheBuilder.newBuilder()
-      .maximumSize(hostRoleCommandStatusSummaryCacheLimit)
-      .expireAfterAccess(hostRoleCommandStatusSummaryCacheExpiryDurationMins, TimeUnit.MINUTES)
-      .build(new CacheLoader<Long, Map<Long, HostRoleCommandStatusSummaryDTO>>() {
-        @Override
-        public Map<Long, HostRoleCommandStatusSummaryDTO> load(Long requestId) throws Exception {
-          LOG.debug("Cache miss for host role command status summary object for request {}, fetching from JPA", requestId);
-          Map<Long, HostRoleCommandStatusSummaryDTO> hrcCommandStatusByStageId = loadAggregateCounts(requestId);
-
-          return hrcCommandStatusByStageId;
-        }
-      });
-  }
-
   @RequiresSession
   public HostRoleCommandEntity findByPK(long taskId) {
     return entityManagerProvider.get().find(HostRoleCommandEntity.class, taskId);
@@ -544,16 +425,11 @@ public class HostRoleCommandDAO {
   @Transactional
   public void create(HostRoleCommandEntity stageEntity) {
     entityManagerProvider.get().persist(stageEntity);
-
-    invalidateHostRoleCommandStatusCache(stageEntity);
   }
 
   @Transactional
   public HostRoleCommandEntity merge(HostRoleCommandEntity stageEntity) {
     HostRoleCommandEntity entity = entityManagerProvider.get().merge(stageEntity);
-
-    invalidateHostRoleCommandStatusCache(entity);
-
     return entity;
   }
 
@@ -570,8 +446,6 @@ public class HostRoleCommandDAO {
     List<HostRoleCommandEntity> managedList = new ArrayList<HostRoleCommandEntity>(entities.size());
     for (HostRoleCommandEntity entity : entities) {
       managedList.add(entityManagerProvider.get().merge(entity));
-
-      invalidateHostRoleCommandStatusCache(entity);
     }
     return managedList;
   }
@@ -579,8 +453,6 @@ public class HostRoleCommandDAO {
   @Transactional
   public void remove(HostRoleCommandEntity stageEntity) {
     entityManagerProvider.get().remove(merge(stageEntity));
-
-    invalidateHostRoleCommandStatusCache(stageEntity);
   }
 
   @Transactional
@@ -591,16 +463,38 @@ public class HostRoleCommandDAO {
 
   /**
    * Finds the counts of tasks for a request and groups them by stage id.
+   * This allows for very efficient loading when there are a huge number of stages
+   * and tasks to iterate (for example, during a Stack Upgrade).
    * @param requestId the request id
    * @return the map of stage-to-summary objects
    */
+  @RequiresSession
   public Map<Long, HostRoleCommandStatusSummaryDTO> findAggregateCounts(Long requestId) {
-    if (hostRoleCommandStatusSummaryCacheEnabled)
-      return hrcStatusSummaryCache.getUnchecked(requestId);
-    else
-      return loadAggregateCounts(requestId); // if caching not enabled fall back to fetching through JPA
-  }
 
+    TypedQuery<HostRoleCommandStatusSummaryDTO> query = entityManagerProvider.get().createQuery(
+        SUMMARY_DTO, HostRoleCommandStatusSummaryDTO.class);
+
+    query.setParameter("requestId", requestId);
+    query.setParameter("aborted", HostRoleStatus.ABORTED);
+    query.setParameter("completed", HostRoleStatus.COMPLETED);
+    query.setParameter("failed", HostRoleStatus.FAILED);
+    query.setParameter("holding", HostRoleStatus.HOLDING);
+    query.setParameter("holding_failed", HostRoleStatus.HOLDING_FAILED);
+    query.setParameter("holding_timedout", HostRoleStatus.HOLDING_TIMEDOUT);
+    query.setParameter("in_progress", HostRoleStatus.IN_PROGRESS);
+    query.setParameter("pending", HostRoleStatus.PENDING);
+    query.setParameter("queued", HostRoleStatus.QUEUED);
+    query.setParameter("timedout", HostRoleStatus.TIMEDOUT);
+    query.setParameter("skipped_failed", HostRoleStatus.SKIPPED_FAILED);
+
+    Map<Long, HostRoleCommandStatusSummaryDTO> map = new HashMap<Long, HostRoleCommandStatusSummaryDTO>();
+
+    for (HostRoleCommandStatusSummaryDTO dto : daoUtils.selectList(query)) {
+      map.put(dto.getStageId(), dto);
+    }
+
+    return map;
+  }
 
   /**
    * Updates the {@link HostRoleCommandEntity#isFailureAutoSkipped()} flag for

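The cache that the hunk above removes follows the usual Guava LoadingCache pattern: entries are keyed by request id, populated on a miss by falling through to the expensive aggregate query, and explicitly invalidated after any write that can change a task's status. A minimal self-contained sketch of that pattern is below; the class, method names, and the String value type are illustrative stand-ins, not the Ambari ones.

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;

public class RequestSummaryCache {

  private final LoadingCache<Long, Map<Long, String>> cache;

  public RequestSummaryCache(long maxSize, long expiryMinutes) {
    cache = CacheBuilder.newBuilder()
        .maximumSize(maxSize)
        .expireAfterAccess(expiryMinutes, TimeUnit.MINUTES)
        .build(new CacheLoader<Long, Map<Long, String>>() {
          @Override
          public Map<Long, String> load(Long requestId) {
            // Cache miss: fall through to the expensive per-stage aggregate query.
            return loadFromDatabase(requestId);
          }
        });
  }

  public Map<Long, String> getSummaries(Long requestId) {
    // getUnchecked is safe here because load() declares no checked exceptions.
    return cache.getUnchecked(requestId);
  }

  public void invalidate(Long requestId) {
    // Call after any write that may change task status for this request.
    cache.invalidate(requestId);
  }

  private Map<Long, String> loadFromDatabase(Long requestId) {
    return new HashMap<Long, String>();  // placeholder for the real query
  }
}
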
http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
index 1674175..af71c40 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
@@ -32,7 +32,6 @@ import javax.persistence.FetchType;
 import javax.persistence.GeneratedValue;
 import javax.persistence.GenerationType;
 import javax.persistence.Id;
-import javax.persistence.Index;
 import javax.persistence.JoinColumn;
 import javax.persistence.JoinColumns;
 import javax.persistence.Lob;
@@ -49,11 +48,7 @@ import org.apache.ambari.server.actionmanager.HostRoleStatus;
 import org.apache.commons.lang.ArrayUtils;
 
 @Entity
-@Table(name = "host_role_command"
-       , indexes = {
-           @Index(name = "idx_hrc_request_id", columnList = "request_id")
-         , @Index(name = "idx_hrc_status_role", columnList = "status, role")
-       })
+@Table(name = "host_role_command")
 @TableGenerator(name = "host_role_command_id_generator",
     table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
     , pkColumnValue = "host_role_command_id_seq"

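The entity change above drops the JPA 2.1 index declarations, leaving index creation to the DDL scripts and upgrade catalog instead. As a reference point, declaring the same two indexes on the @Table annotation looks roughly like this stand-alone illustrative entity (field set trimmed down; not the real HostRoleCommandEntity):

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.Table;

@Entity
@Table(name = "host_role_command_example",
       indexes = {
         @Index(name = "idx_hrc_request_id", columnList = "request_id"),
         @Index(name = "idx_hrc_status_role", columnList = "status, role")
       })
public class HostRoleCommandExample {

  @Id
  @Column(name = "task_id")
  private Long taskId;

  @Column(name = "request_id")
  private Long requestId;

  @Column(name = "status")
  private String status;

  @Column(name = "role")
  private String role;
}
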
http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
index 519e4e6..65cc107 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
@@ -81,9 +81,6 @@ public class ServiceComponentDesiredStateEntity {
   @Enumerated(EnumType.STRING)
   private State desiredState = State.INIT;
 
-  @Column(name = "recovery_enabled", nullable = false, insertable = true, updatable = true)
-  private Integer recoveryEnabled = 0;
-
   /**
    * Unidirectional one-to-one association to {@link StackEntity}
    */
@@ -183,14 +180,6 @@ public class ServiceComponentDesiredStateEntity {
     return serviceComponentHistory;
   }
 
-  public boolean isRecoveryEnabled() {
-    return recoveryEnabled != 0;
-  }
-
-  public void setRecoveryEnabled(boolean recoveryEnabled) {
-    this.recoveryEnabled = (recoveryEnabled == false) ? 0 : 1;
-  }
-
   @Override
   public boolean equals(Object o) {
     if (this == o) {

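The recovery_enabled field being removed is persisted as a 0/1 SMALLINT and exposed to callers as a boolean. A minimal sketch of that mapping convention, using an illustrative entity rather than the Ambari one:

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;

@Entity
public class ComponentStateExample {

  @Id
  private Long id;

  // Stored as a SMALLINT (0 = disabled, 1 = enabled) for portability across databases.
  @Column(name = "recovery_enabled", nullable = false)
  private Integer recoveryEnabled = 0;

  public boolean isRecoveryEnabled() {
    return recoveryEnabled != 0;
  }

  public void setRecoveryEnabled(boolean enabled) {
    this.recoveryEnabled = enabled ? 1 : 0;
  }
}
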
http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/serveraction/ServerActionExecutor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/ServerActionExecutor.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/ServerActionExecutor.java
index f93cf43..20cf5bb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/ServerActionExecutor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/ServerActionExecutor.java
@@ -392,8 +392,17 @@ public class ServerActionExecutor {
    * @throws InterruptedException
    */
   public void doWork() throws InterruptedException {
-    List<HostRoleCommand> tasks = db.getTasksByRoleAndStatus(Role.AMBARI_SERVER_ACTION.name(),
-      HostRoleStatus.QUEUED);
+    List<HostRoleCommand> tasks = db.getTasksByHostRoleAndStatus(serverHostName,
+        Role.AMBARI_SERVER_ACTION.toString(), HostRoleStatus.QUEUED);
+
+    if (null == tasks || tasks.isEmpty()) {
+      // !!! if the server is not a part of the cluster,
+      // !!! just look for anything designated AMBARI_SERVER_ACTION.
+      // !!! do we even need to worry about servername in the first place?  We're
+      // !!! _on_ the ambari server!
+      tasks = db.getTasksByRoleAndStatus(Role.AMBARI_SERVER_ACTION.name(),
+          HostRoleStatus.QUEUED);
+    }
 
     if ((tasks != null) && !tasks.isEmpty()) {
       for (HostRoleCommand task : tasks) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponent.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponent.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponent.java
index dcb7cf6..7803045 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponent.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponent.java
@@ -28,20 +28,6 @@ public interface ServiceComponent {
 
   String getName();
 
-  /**
-   * Get a true or false value specifying
-   * if auto start was enabled for this component.
-   * @return true or false
-   */
-  boolean isRecoveryEnabled();
-
-  /**
-   * Set a true or false value specifying if this
-   * component is to be enabled for auto start or not.
-   * @param recoveryEnabled - true or false
-   */
-  void setRecoveryEnabled(boolean recoveryEnabled);
-
   String getServiceName();
 
   long getClusterId();

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
index defe808..4afc857 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
@@ -95,7 +95,6 @@ public class ServiceComponentImpl implements ServiceComponent {
     desiredStateEntity.setDesiredState(State.INIT);
     desiredStateEntity.setServiceName(service.getName());
     desiredStateEntity.setClusterId(service.getClusterId());
-    desiredStateEntity.setRecoveryEnabled(false);
 
     setDesiredStackVersion(service.getDesiredStackVersion());
 
@@ -182,55 +181,6 @@ public class ServiceComponentImpl implements ServiceComponent {
     return componentName;
   }
 
-  /**
-   * Get the recoveryEnabled value.
-   *
-   * @return true or false
-   */
-  @Override
-  public boolean isRecoveryEnabled() {
-    ServiceComponentDesiredStateEntity desiredStateEntity = getDesiredStateEntity();
-    if (desiredStateEntity != null) {
-      return desiredStateEntity.isRecoveryEnabled();
-    } else {
-      LOG.warn("Trying to fetch a member from an entity object that may " +
-              "have been previously deleted, serviceName = " + service.getName() + ", " +
-              "componentName = " + componentName);
-    }
-    return false;
-  }
-
-  /**
-   * Set the recoveryEnabled field in the entity object.
-   *
-   * @param recoveryEnabled - true or false
-   */
-  @Override
-  public void setRecoveryEnabled(boolean recoveryEnabled) {
-    readWriteLock.writeLock().lock();
-    try {
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("Setting RecoveryEnabled of Component" + ", clusterName="
-                + service.getCluster().getClusterName() + ", clusterId="
-                + service.getCluster().getClusterId() + ", serviceName="
-                + service.getName() + ", componentName=" + getName()
-                + ", oldRecoveryEnabled=" + isRecoveryEnabled() + ", newRecoveryEnabled="
-                + recoveryEnabled);
-      }
-      ServiceComponentDesiredStateEntity desiredStateEntity = getDesiredStateEntity();
-      if (desiredStateEntity != null) {
-        desiredStateEntity.setRecoveryEnabled(recoveryEnabled);
-        saveIfPersisted(desiredStateEntity);
-      } else {
-        LOG.warn("Setting a member on an entity object that may have been " +
-                "previously deleted, serviceName = " + service.getName());
-      }
-
-    } finally {
-      readWriteLock.writeLock().unlock();
-    }
-  }
-
   @Override
   public String getServiceName() {
     return service.getName();
@@ -297,8 +247,7 @@ public class ServiceComponentImpl implements ServiceComponent {
               + ", clusterId=" + service.getCluster().getClusterId()
               + ", serviceName=" + service.getName()
               + ", serviceComponentName=" + getName()
-              + ", hostname=" + hostComponent.getHostName()
-              + ", recoveryEnabled=" + isRecoveryEnabled());
+              + ", hostname=" + hostComponent.getHostName());
         }
         if (hostComponents.containsKey(hostComponent.getHostName())) {
           throw new AmbariException("Cannot add duplicate ServiceComponentHost"
@@ -306,8 +255,7 @@ public class ServiceComponentImpl implements ServiceComponent {
               + ", clusterId=" + service.getCluster().getClusterId()
               + ", serviceName=" + service.getName()
               + ", serviceComponentName=" + getName()
-              + ", hostname=" + hostComponent.getHostName()
-              + ", recoveryEnabled=" + isRecoveryEnabled());
+              + ", hostname=" + hostComponent.getHostName());
         }
         // FIXME need a better approach of caching components by host
         ClusterImpl clusterImpl = (ClusterImpl) service.getCluster();
@@ -335,7 +283,6 @@ public class ServiceComponentImpl implements ServiceComponent {
               + ", clusterId=" + service.getCluster().getClusterId()
               + ", serviceName=" + service.getName()
               + ", serviceComponentName=" + getName()
-              + ", recoveryEnabled=" + isRecoveryEnabled()
               + ", hostname=" + hostName);
         }
         if (hostComponents.containsKey(hostName)) {
@@ -344,7 +291,6 @@ public class ServiceComponentImpl implements ServiceComponent {
               + ", clusterId=" + service.getCluster().getClusterId()
               + ", serviceName=" + service.getName()
               + ", serviceComponentName=" + getName()
-              + ", recoveryEnabled=" + isRecoveryEnabled()
               + ", hostname=" + hostName);
         }
         ServiceComponentHost hostComponent = serviceComponentHostFactory.createNew(this, hostName);
@@ -408,11 +354,11 @@ public class ServiceComponentImpl implements ServiceComponent {
     try {
       if (LOG.isDebugEnabled()) {
         LOG.debug("Setting DesiredState of Service" + ", clusterName="
-                + service.getCluster().getClusterName() + ", clusterId="
-                + service.getCluster().getClusterId() + ", serviceName="
-                + service.getName() + ", serviceComponentName=" + getName()
-                + ", oldDesiredState=" + getDesiredState() + ", newDesiredState="
-                + state);
+            + service.getCluster().getClusterName() + ", clusterId="
+            + service.getCluster().getClusterId() + ", serviceName="
+            + service.getName() + ", serviceComponentName=" + getName()
+            + ", oldDesiredState=" + getDesiredState() + ", newDesiredState="
+            + state);
       }
       ServiceComponentDesiredStateEntity desiredStateEntity = getDesiredStateEntity();
       if (desiredStateEntity != null) {
@@ -482,8 +428,7 @@ public class ServiceComponentImpl implements ServiceComponent {
       ServiceComponentResponse r = new ServiceComponentResponse(getClusterId(),
           cluster.getClusterName(), service.getName(), getName(),
           getDesiredStackVersion().getStackId(), getDesiredState().toString(),
-          getTotalCount(), getStartedCount(), getInstalledCount(),
-          isRecoveryEnabled());
+          getTotalCount(), getStartedCount(), getInstalledCount());
       return r;
     } finally {
       readWriteLock.readLock().unlock();
@@ -495,13 +440,11 @@ public class ServiceComponentImpl implements ServiceComponent {
     return service.getCluster().getClusterName();
   }
 
-
   @Override
   public void debugDump(StringBuilder sb) {
     readWriteLock.readLock().lock();
     try {
       sb.append("ServiceComponent={ serviceComponentName=" + getName()
-          + ", recoveryEnabled=" + isRecoveryEnabled()
           + ", clusterName=" + service.getCluster().getClusterName()
           + ", clusterId=" + service.getCluster().getClusterId()
           + ", serviceName=" + service.getName() + ", desiredStackVersion="
@@ -649,7 +592,6 @@ public class ServiceComponentImpl implements ServiceComponent {
                 + ", clusterName=" + getClusterName()
                 + ", serviceName=" + getServiceName()
                 + ", componentName=" + getName()
-                + ", recoveryEnabled=" + isRecoveryEnabled()
                 + ", hostname=" + sch.getHostName());
             return false;
           }
@@ -673,8 +615,7 @@ public class ServiceComponentImpl implements ServiceComponent {
         LOG.info("Deleting all servicecomponenthosts for component"
             + ", clusterName=" + getClusterName()
             + ", serviceName=" + getServiceName()
-            + ", componentName=" + getName()
-            + ", recoveryEnabled=" + isRecoveryEnabled());
+            + ", componentName=" + getName());
         for (ServiceComponentHost sch : hostComponents.values()) {
           if (!sch.canBeRemoved()) {
             throw new AmbariException("Found non removable hostcomponent "
@@ -683,7 +624,6 @@ public class ServiceComponentImpl implements ServiceComponent {
                 + ", clusterName=" + getClusterName()
                 + ", serviceName=" + getServiceName()
                 + ", componentName=" + getName()
-                + ", recoveryEnabled=" + isRecoveryEnabled()
                 + ", hostname=" + sch.getHostName());
           }
         }
@@ -712,14 +652,12 @@ public class ServiceComponentImpl implements ServiceComponent {
             + ", clusterName=" + getClusterName()
             + ", serviceName=" + getServiceName()
             + ", componentName=" + getName()
-            + ", recoveryEnabled=" + isRecoveryEnabled()
             + ", hostname=" + sch.getHostName());
         if (!sch.canBeRemoved()) {
           throw new AmbariException("Could not delete hostcomponent from cluster"
               + ", clusterName=" + getClusterName()
               + ", serviceName=" + getServiceName()
               + ", componentName=" + getName()
-              + ", recoveryEnabled=" + isRecoveryEnabled()
               + ", hostname=" + sch.getHostName());
         }
         sch.delete();

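Most of the removed ServiceComponentImpl code follows one convention: reads take the read lock, mutations take the write lock, and the desired-state entity is updated and saved while the write lock is held. A stripped-down sketch of that locking pattern, with an illustrative class and no JPA involved:

import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

public class GuardedRecoveryFlag {

  private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
  private boolean recoveryEnabled;

  public boolean isRecoveryEnabled() {
    readWriteLock.readLock().lock();
    try {
      return recoveryEnabled;
    } finally {
      readWriteLock.readLock().unlock();
    }
  }

  public void setRecoveryEnabled(boolean enabled) {
    readWriteLock.writeLock().lock();
    try {
      // In the real class the entity field is updated and persisted here,
      // while the write lock is still held.
      recoveryEnabled = enabled;
    } finally {
      readWriteLock.writeLock().unlock();
    }
  }
}
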
http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 07addfc..8230fe3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -2474,31 +2474,13 @@ public class ClusterImpl implements Cluster {
     clusterGlobalLock.readLock().lock();
     try {
       List<ServiceConfigVersionResponse> serviceConfigVersionResponses = new ArrayList<ServiceConfigVersionResponse>();
+      Set<Long> activeIds = getActiveServiceConfigVersionIds();
 
-      List<ServiceConfigEntity> serviceConfigs = serviceConfigDAO.getServiceConfigs(getClusterId());
-      Map<String, ServiceConfigVersionResponse> activeServiceConfigResponses = new HashMap<>();
-
-      for (ServiceConfigEntity serviceConfigEntity : serviceConfigs) {
+      for (ServiceConfigEntity serviceConfigEntity : serviceConfigDAO.getServiceConfigs(getClusterId())) {
         ServiceConfigVersionResponse serviceConfigVersionResponse = convertToServiceConfigVersionResponse(serviceConfigEntity);
 
-        ServiceConfigVersionResponse activeServiceConfigResponse = activeServiceConfigResponses.get(serviceConfigVersionResponse.getServiceName());
-        if (activeServiceConfigResponse == null) {
-          activeServiceConfigResponse = serviceConfigVersionResponse;
-          activeServiceConfigResponses.put(serviceConfigVersionResponse.getServiceName(), serviceConfigVersionResponse);
-        }
-
         serviceConfigVersionResponse.setConfigurations(new ArrayList<ConfigurationResponse>());
-
-        if (serviceConfigEntity.getGroupId() == null) {
-          if (serviceConfigVersionResponse.getCreateTime() > activeServiceConfigResponse.getCreateTime())
-            activeServiceConfigResponses.put(serviceConfigVersionResponse.getServiceName(), serviceConfigVersionResponse);
-        }
-        else if (clusterConfigGroups != null && clusterConfigGroups.containsKey(serviceConfigEntity.getGroupId())){
-          if (serviceConfigVersionResponse.getVersion() > activeServiceConfigResponse.getVersion())
-            activeServiceConfigResponses.put(serviceConfigVersionResponse.getServiceName(), serviceConfigVersionResponse);
-        }
-
-        serviceConfigVersionResponse.setIsCurrent(false);
+        serviceConfigVersionResponse.setIsCurrent(activeIds.contains(serviceConfigEntity.getServiceConfigId()));
 
         List<ClusterConfigEntity> clusterConfigEntities = serviceConfigEntity.getClusterConfigEntities();
         for (ClusterConfigEntity clusterConfigEntity : clusterConfigEntities) {
@@ -2514,10 +2496,6 @@ public class ClusterImpl implements Cluster {
         serviceConfigVersionResponses.add(serviceConfigVersionResponse);
       }
 
-      for (ServiceConfigVersionResponse serviceConfigVersionResponse: activeServiceConfigResponses.values()) {
-        serviceConfigVersionResponse.setIsCurrent(true);
-      }
-
       return serviceConfigVersionResponses;
     } finally {
       clusterGlobalLock.readLock().unlock();
@@ -2536,6 +2514,14 @@ public class ClusterImpl implements Cluster {
     return responses;
   }
 
+  private Set<Long> getActiveServiceConfigVersionIds() {
+    Set<Long> idSet = new HashSet<Long>();
+    for (ServiceConfigEntity entity : getActiveServiceConfigVersionEntities()) {
+      idSet.add(entity.getServiceConfigId());
+    }
+    return idSet;
+  }
+
   private List<ServiceConfigEntity> getActiveServiceConfigVersionEntities() {
 
     List<ServiceConfigEntity> activeServiceConfigVersions = new ArrayList<ServiceConfigEntity>();

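The ClusterImpl hunk replaces a per-service scan, which kept only the newest service config version response and marked it current, with a lookup against the set of active version ids. Ignoring the JPA types and the config-group branch, the selection idea in the removed code is a max-per-key pass over the list, roughly as sketched below with illustrative types:

import java.util.HashMap;
import java.util.List;
import java.util.Map;

/** Illustrative stand-in for ServiceConfigVersionResponse. */
class VersionResponse {
  final String serviceName;
  final long createTime;
  boolean current;

  VersionResponse(String serviceName, long createTime) {
    this.serviceName = serviceName;
    this.createTime = createTime;
  }
}

class CurrentVersionMarker {

  /** Marks, for each service, only the newest version response as current. */
  static void markCurrent(List<VersionResponse> responses) {
    Map<String, VersionResponse> newestPerService = new HashMap<String, VersionResponse>();
    for (VersionResponse response : responses) {
      response.current = false;
      VersionResponse newest = newestPerService.get(response.serviceName);
      if (newest == null || response.createTime > newest.createTime) {
        newestPerService.put(response.serviceName, response);
      }
    }
    for (VersionResponse newest : newestPerService.values()) {
      newest.current = true;
    }
  }
}
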
http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/state/services/AlertNoticeDispatchService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/services/AlertNoticeDispatchService.java b/ambari-server/src/main/java/org/apache/ambari/server/state/services/AlertNoticeDispatchService.java
index 0b84568..a27bc1d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/services/AlertNoticeDispatchService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/services/AlertNoticeDispatchService.java
@@ -451,7 +451,7 @@ public class AlertNoticeDispatchService extends AbstractScheduledService {
     String targetType = target.getNotificationType();
 
     // build the velocity objects for template rendering
-    AmbariInfo ambari = new AmbariInfo(m_metaInfo.get(), m_configuration);
+    AmbariInfo ambari = new AmbariInfo(m_metaInfo.get());
     AlertSummaryInfo summary = new AlertSummaryInfo(histories);
     DispatchInfo dispatch = new DispatchInfo(target);
 
@@ -516,7 +516,7 @@ public class AlertNoticeDispatchService extends AbstractScheduledService {
     String targetType = target.getNotificationType();
 
     // build the velocity objects for template rendering
-    AmbariInfo ambari = new AmbariInfo(m_metaInfo.get(), m_configuration);
+    AmbariInfo ambari = new AmbariInfo(m_metaInfo.get());
     AlertInfo alert = new AlertInfo(history);
     DispatchInfo dispatch = new DispatchInfo(target);
 
@@ -558,10 +558,6 @@ public class AlertNoticeDispatchService extends AbstractScheduledService {
         bodyWriter.write(alert.getAlertName());
         bodyWriter.write(" ");
         bodyWriter.write(alert.getAlertText());
-        if (alert.hasHostName()) {
-          bodyWriter.write(" ");
-          bodyWriter.append(alert.getHostName());
-        }
         bodyWriter.write("\n");
       }
     }
@@ -1046,8 +1042,7 @@ public class AlertNoticeDispatchService extends AbstractScheduledService {
      *
      * @param metaInfo
      */
-    protected AmbariInfo(AmbariMetaInfo metaInfo, Configuration m_configuration) {
-      m_url = m_configuration.getAmbariDisplayUrl();
+    protected AmbariInfo(AmbariMetaInfo metaInfo) {
       m_version = metaInfo.getServerVersion();
     }
 
@@ -1058,10 +1053,6 @@ public class AlertNoticeDispatchService extends AbstractScheduledService {
       return m_hostName;
     }
 
-    public boolean hasUrl() {
-      return m_url != null;
-    }
-
     /**
      * @return the url
      */
@@ -1202,4 +1193,4 @@ public class AlertNoticeDispatchService extends AbstractScheduledService {
       return m_body;
     }
   }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
index 0aa1e7a..88b3151 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
@@ -115,7 +115,6 @@ public class UpgradeCatalog222 extends AbstractUpgradeCatalog {
     updateAlerts();
     updateStormConfigs();
     updateAMSConfigs();
-    updateHostRoleCommands();
   }
 
   protected void updateStormConfigs() throws  AmbariException {
@@ -154,10 +153,6 @@ public class UpgradeCatalog222 extends AbstractUpgradeCatalog {
 
   }
 
-  protected void updateHostRoleCommands() throws SQLException{
-    dbAccessor.createIndex("idx_hrc_status", "host_role_command", "status", "role");
-  }
-
   protected void updateAMSConfigs() throws AmbariException {
     AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
     Clusters clusters = ambariManagementController.getClusters();

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
index 4e99c89..09f31e4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
@@ -32,7 +32,6 @@ import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.controller.AmbariManagementController;
-import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
 import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
 import org.apache.ambari.server.orm.dao.DaoUtils;
@@ -90,8 +89,6 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   private static final String ID = "id";
   private static final String SETTING_TABLE = "setting";
 
-  protected static final String SERVICE_COMPONENT_DESIRED_STATE_TABLE = "servicecomponentdesiredstate";
-  protected static final String RECOVERY_ENABLED_COL = "recovery_enabled";
 
   // ----- Constructors ------------------------------------------------------
 
@@ -130,7 +127,6 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   @Override
   protected void executeDDLUpdates() throws AmbariException, SQLException {
     updateAdminPermissionTable();
-    updateServiceComponentDesiredStateTable();
     createSettingTable();
     updateRepoVersionTableDDL();
     updateServiceComponentDesiredStateTableDDL();
@@ -566,14 +562,4 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
 
     addSequence("servicecomponent_history_id_seq", 0L, false);
   }
-  /**
-   * Alter servicecomponentdesiredstate table to add recovery_enabled column.
-   * @throws SQLException
-   */
-  private void updateServiceComponentDesiredStateTable() throws SQLException {
-    // ALTER TABLE servicecomponentdesiredstate ADD COLUMN
-    // recovery_enabled SMALLINT DEFAULT 0 NOT NULL
-    dbAccessor.addColumn(SERVICE_COMPONENT_DESIRED_STATE_TABLE,
-            new DBAccessor.DBColumnInfo(RECOVERY_ENABLED_COL, Short.class, null, 0, false));
-  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
index 73cf84e..2db745b 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
@@ -177,10 +177,8 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
-  recovery_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
-  PRIMARY KEY (component_name, cluster_id, service_name)
 );
 
 CREATE TABLE servicedesiredstate (

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 9353ac2..b892bc8 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -178,10 +178,8 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(100) NOT NULL,
-  recovery_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
-  PRIMARY KEY (component_name, cluster_id, service_name)
 );
 
 CREATE TABLE servicedesiredstate (
@@ -685,7 +683,6 @@ CREATE TABLE setting (
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
-CREATE INDEX idx_hrc_status_role ON host_role_command (status, role);
 CREATE INDEX idx_rsc_request_id ON role_success_criteria (request_id);
 
 -- altering tables by creating unique constraints----------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 5f39b44..026efea 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -168,10 +168,8 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id NUMBER(19) NOT NULL,
   desired_state VARCHAR2(255) NOT NULL,
   service_name VARCHAR2(255) NOT NULL,
-  recovery_enabled SMALLINT DEFAULT 0 NOT NULL,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
-  PRIMARY KEY (component_name, cluster_id, service_name)
 );
 
 CREATE TABLE servicedesiredstate (
@@ -674,7 +672,6 @@ CREATE TABLE setting (
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
-CREATE INDEX idx_hrc_status_role ON host_role_command (status, role);
 CREATE INDEX idx_rsc_request_id ON role_success_criteria (request_id);
 
 --------altering tables by creating unique constraints----------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index 4a8fa2a..fb9889d 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -177,10 +177,8 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
-  recovery_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
-  PRIMARY KEY (component_name, cluster_id, service_name)
 );
 
 CREATE TABLE servicedesiredstate (
@@ -678,11 +676,8 @@ CREATE TABLE setting (
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
-CREATE INDEX idx_hrc_status_role ON host_role_command (status, role);
 CREATE INDEX idx_rsc_request_id ON role_success_criteria (request_id);
 
-
-
 --------altering tables by creating unique constraints----------
 ALTER TABLE users ADD CONSTRAINT UNQ_users_0 UNIQUE (user_name, user_type);
 ALTER TABLE clusterconfig ADD CONSTRAINT UQ_config_type_tag UNIQUE (cluster_id, type_name, version_tag);


http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
index 105d695..3cc7516 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
@@ -202,10 +202,8 @@ CREATE TABLE ambari.servicecomponentdesiredstate (
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
-  recovery_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
-  PRIMARY KEY (component_name, cluster_id, service_name)
 );
 GRANT ALL PRIVILEGES ON TABLE ambari.servicecomponentdesiredstate TO :username;
 
@@ -760,7 +758,6 @@ GRANT ALL PRIVILEGES ON TABLE ambari.setting TO :username;
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON ambari.stage (request_id);
 CREATE INDEX idx_hrc_request_id ON ambari.host_role_command (request_id);
-CREATE INDEX idx_hrc_status_role ON ambari.host_role_command (status, role);
 CREATE INDEX idx_rsc_request_id ON ambari.role_success_criteria (request_id);
 
 --------altering tables by creating unique constraints----------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
index a897454..a5bfdc2 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
@@ -167,10 +167,8 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id NUMERIC(19) NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
-  recovery_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
-  PRIMARY KEY (component_name, cluster_id, service_name)
 );
 
 CREATE TABLE servicedesiredstate (
@@ -675,7 +673,6 @@ CREATE TABLE setting (
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
-CREATE INDEX idx_hrc_status_role ON host_role_command (status, role);
 CREATE INDEX idx_rsc_request_id ON role_success_criteria (request_id);
 
 -- altering tables by creating unique constraints----------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
index a8bda7c..8e5b2f8 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
@@ -187,12 +187,10 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
-  recovery_enabled SMALLINT NOT NULL DEFAULT 0,
   PRIMARY KEY CLUSTERED (id),
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
-  PRIMARY KEY CLUSTERED (component_name, cluster_id, service_name)
-  );
+);
 
 CREATE TABLE servicedesiredstate (
   cluster_id BIGINT NOT NULL,
@@ -784,7 +782,6 @@ CREATE TABLE setting (
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
-CREATE INDEX idx_hrc_status_role ON host_role_command (status, role);
 CREATE INDEX idx_rsc_request_id ON role_success_criteria (request_id);
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/alert-templates.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/alert-templates.xml b/ambari-server/src/main/resources/alert-templates.xml
index 2e8fc71..d7fe320 100644
--- a/ambari-server/src/main/resources/alert-templates.xml
+++ b/ambari-server/src/main/resources/alert-templates.xml
@@ -158,20 +158,6 @@
                   <div class="label-small">
                     $alert.getAlertText()
                   </div>
-                  <div class="label-small">
-                    Cluster: $alert.getAlertDefinition().getCluster().getClusterName()
-                  </div>
-                  #if( $alert.getHostName() )
-                    #if( $ambari.hasUrl() )
-                      <div class="label-small">
-                      Host: <a href=$ambari.getUrl()/#/main/hosts/$alert.getHostName()/summary>$ambari.getUrl()/#/main/hosts/$alert.getHostName()/summary</a>
-                      </div>
-                    #else
-                      <div class="label-small">
-                        Host: $alert.getHostName()
-                      </div>
-                    #end
-                  #end
                 </td>
               </tr>
             #end
@@ -184,10 +170,6 @@
     This notification was sent to $dispatch.getTargetName()
     <br/>
     Apache Ambari $ambari.getServerVersion()
-    #if( $ambari.hasUrl() )
-    <br/>
-    Ambari Server link: <a href=$ambari.getUrl()>$ambari.getUrl()</a>
-    #end
   </div>
 </html>
       ]]>
@@ -211,4 +193,4 @@
 $alert.getAlertText()]]>
     </body>
   </alert-template>  
-</alert-templates>
+</alert-templates>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/configuration/kafka-broker.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/configuration/kafka-broker.xml b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/configuration/kafka-broker.xml
index 857f40e..704d73f 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/configuration/kafka-broker.xml
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/configuration/kafka-broker.xml
@@ -325,6 +325,7 @@
     <description>Timeline port</description>
   </property>
   <property>
+  <property>
     <name>kafka.timeline.metrics.protocol</name>
     <value>{{metric_collector_protocol}}</value>
     <description>Timeline protocol(http or https)</description>

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/properties.json b/ambari-server/src/main/resources/properties.json
index 8ccae05..4052ad2 100644
--- a/ambari-server/src/main/resources/properties.json
+++ b/ambari-server/src/main/resources/properties.json
@@ -62,7 +62,6 @@
         "ServiceComponentInfo/total_count",
         "ServiceComponentInfo/started_count",
         "ServiceComponentInfo/installed_count",
-        "ServiceComponentInfo/recovery_enabled",
         "params/run_smoke_test",
         "_"
     ],

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/scripts/Ambaripreupload.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/scripts/Ambaripreupload.py b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
index fb0c3e1..cc6213e 100644
--- a/ambari-server/src/main/resources/scripts/Ambaripreupload.py
+++ b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
@@ -132,7 +132,6 @@ with Environment() as env:
   TAR_DESTINATION_FOLDER_SUFFIX = "_tar_destination_folder"
   
   class params:
-    hdfs_path_prefix = hdfs_path_prefix
     hdfs_user = "hdfs"
     mapred_user ="mapred"
     hadoop_bin_dir="/usr/hdp/" + hdp_version + "/hadoop/bin"
@@ -237,23 +236,23 @@ with Environment() as env:
     return _copy_files(source_and_dest_pairs, file_owner, group_owner, kinit_if_needed)
   
   def createHdfsResources():
-    params.HdfsResource(format('{hdfs_path_prefix}/atshistory'), user='hdfs', change_permissions_for_parents=True, owner='yarn', group='hadoop', type='directory', action= ['create_on_execute'], mode=0755)
-    params.HdfsResource(format('{hdfs_path_prefix}/user/hcat'), owner='hcat', type='directory', action=['create_on_execute'], mode=0755)
-    params.HdfsResource(format('{hdfs_path_prefix}/hive/warehouse'), owner='hive', type='directory', action=['create_on_execute'], mode=0777)
-    params.HdfsResource(format('{hdfs_path_prefix}/user/hive'), owner='hive', type='directory', action=['create_on_execute'], mode=0755)
-    params.HdfsResource(format('{hdfs_path_prefix}/tmp'), mode=0777, action=['create_on_execute'], type='directory', owner='hdfs')
-    params.HdfsResource(format('{hdfs_path_prefix}/user/ambari-qa'), type='directory', action=['create_on_execute'], mode=0770)
-    params.HdfsResource(format('{hdfs_path_prefix}/user/oozie'), owner='oozie', type='directory', action=['create_on_execute'], mode=0775)
-    params.HdfsResource(format('{hdfs_path_prefix}/app-logs'), recursive_chmod=True, owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=0777)
-    params.HdfsResource(format('{hdfs_path_prefix}/tmp/entity-file-history/active'), owner='yarn', group='hadoop', type='directory', action=['create_on_execute'])
-    params.HdfsResource(format('{hdfs_path_prefix}/mapred'), owner='mapred', type='directory', action=['create_on_execute'])
-    params.HdfsResource(format('{hdfs_path_prefix}/mapred/system'), owner='hdfs', type='directory', action=['create_on_execute'])
-    params.HdfsResource(format('{hdfs_path_prefix}/mr-history/done'), change_permissions_for_parents=True, owner='mapred', group='hadoop', type='directory', action=['create_on_execute'], mode=0777)
-    params.HdfsResource(format('{hdfs_path_prefix}/atshistory/done'), owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=0700)
-    params.HdfsResource(format('{hdfs_path_prefix}/atshistory/active'), owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=01777)
-    params.HdfsResource(format('{hdfs_path_prefix}/ams/hbase'), owner='ams', type='directory', action=['create_on_execute'], mode=0775)
-    params.HdfsResource(format('{hdfs_path_prefix}/amshbase/staging'), owner='ams', type='directory', action=['create_on_execute'], mode=0711)
-    params.HdfsResource(format('{hdfs_path_prefix}/user/ams/hbase'), owner='ams', type='directory', action=['create_on_execute'], mode=0775)
+    params.HdfsResource('/atshistory', user='hdfs', change_permissions_for_parents=True, owner='yarn', group='hadoop', type='directory', action= ['create_on_execute'], mode=0755)
+    params.HdfsResource('/user/hcat', owner='hcat', type='directory', action=['create_on_execute'], mode=0755)
+    params.HdfsResource('/hive/warehouse', owner='hive', type='directory', action=['create_on_execute'], mode=0777)
+    params.HdfsResource('/user/hive', owner='hive', type='directory', action=['create_on_execute'], mode=0755)
+    params.HdfsResource('/tmp', mode=0777, action=['create_on_execute'], type='directory', owner='hdfs')
+    params.HdfsResource('/user/ambari-qa', type='directory', action=['create_on_execute'], mode=0770)
+    params.HdfsResource('/user/oozie', owner='oozie', type='directory', action=['create_on_execute'], mode=0775)
+    params.HdfsResource('/app-logs', recursive_chmod=True, owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=0777)
+    params.HdfsResource('/tmp/entity-file-history/active', owner='yarn', group='hadoop', type='directory', action=['create_on_execute'])
+    params.HdfsResource('/mapred', owner='mapred', type='directory', action=['create_on_execute'])
+    params.HdfsResource('/mapred/system', owner='hdfs', type='directory', action=['create_on_execute'])
+    params.HdfsResource('/mr-history/done', change_permissions_for_parents=True, owner='mapred', group='hadoop', type='directory', action=['create_on_execute'], mode=0777)
+    params.HdfsResource('/atshistory/done', owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=0700)
+    params.HdfsResource('/atshistory/active', owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=01777)
+    params.HdfsResource('/ams/hbase', owner='ams', type='directory', action=['create_on_execute'], mode=0775)
+    params.HdfsResource('/amshbase/staging', owner='ams', type='directory', action=['create_on_execute'], mode=0711)
+    params.HdfsResource('/user/ams/hbase', owner='ams', type='directory', action=['create_on_execute'], mode=0775)
 
 
   def putCreatedHdfsResourcesToIgnore(env):
@@ -263,16 +262,14 @@ with Environment() as env:
     
     file_content = ""
     for file in env.config['hdfs_files']:
-      if not file['target'].startswith(hdfs_path_prefix):
-        raise Exception("Something created outside hdfs_path_prefix!")
-      file_content += file['target'][len(hdfs_path_prefix):]
+      file_content += file['target']
       file_content += "\n"
       
     with open("/var/lib/ambari-agent/data/.hdfs_resource_ignore", "a+") as fp:
       fp.write(file_content)
       
   def putSQLDriverToOozieShared():
-    params.HdfsResource(hdfs_path_prefix + '/user/oozie/share/lib/sqoop/{0}'.format(os.path.basename(SQL_DRIVER_PATH)),
+    params.HdfsResource('/user/oozie/share/lib/sqoop/{0}'.format(os.path.basename(SQL_DRIVER_PATH)),
                         owner='hdfs', type='file', action=['create_on_execute'], mode=0644, source=SQL_DRIVER_PATH)
       
   env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index f6f8cde..7c69ac9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -581,8 +581,8 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
         putAmsHbaseSiteProperty("hbase.regionserver.global.memstore.upperLimit", 0.3)
         putAmsHbaseSiteProperty("hbase.regionserver.global.memstore.lowerLimit", 0.25)
         putAmsHbaseSiteProperty("phoenix.query.maxGlobalMemoryPercentage", 20)
-        putAmsHbaseSiteProperty("phoenix.coprocessor.maxMetaDataCacheSize", 81920000)
         putAmsSiteProperty("phoenix.query.maxGlobalMemoryPercentage", 30)
+        putAmsHbaseSiteProperty("phoenix.coprocessor.maxMetaDataCacheSize", 81920000)
       elif total_sinks_count >= 500:
         putAmsHbaseSiteProperty("hbase.regionserver.handler.count", 60)
         putAmsHbaseSiteProperty("hbase.regionserver.hlog.blocksize", 134217728)
@@ -593,9 +593,6 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
         putAmsHbaseSiteProperty("phoenix.coprocessor.maxMetaDataCacheSize", 20480000)
       pass
 
-    metrics_api_handlers = min(50, max(20, int(total_sinks_count / 100)))
-    putAmsSiteProperty("timeline.metrics.service.handler.thread.count", metrics_api_handlers)
-
     # Distributed mode heap size
     if operatingMode == "distributed":
       hbase_heapsize = max(hbase_heapsize, 756)

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
index cd25d77..dc968cc 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
@@ -367,16 +367,13 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
 
     yarn_queues = "default"
     capacitySchedulerProperties = {}
-    if "capacity-scheduler" in services['configurations']:
-      if "capacity-scheduler" in services['configurations']["capacity-scheduler"]["properties"]:
-        properties = str(services['configurations']["capacity-scheduler"]["properties"]["capacity-scheduler"]).split('\n')
-        for property in properties:
-          key,sep,value = property.partition("=")
-          capacitySchedulerProperties[key] = value
-      if "yarn.scheduler.capacity.root.queues" in capacitySchedulerProperties:
-        yarn_queues = str(capacitySchedulerProperties["yarn.scheduler.capacity.root.queues"])
-      elif "yarn.scheduler.capacity.root.queues" in services['configurations']["capacity-scheduler"]["properties"]:
-        yarn_queues =  services['configurations']["capacity-scheduler"]["properties"]["yarn.scheduler.capacity.root.queues"]
+    if "capacity-scheduler" in services['configurations'] and "capacity-scheduler" in services['configurations']["capacity-scheduler"]["properties"]:
+      properties = str(services['configurations']["capacity-scheduler"]["properties"]["capacity-scheduler"]).split('\n')
+      for property in properties:
+        key,sep,value = property.partition("=")
+        capacitySchedulerProperties[key] = value
+    if "yarn.scheduler.capacity.root.queues" in capacitySchedulerProperties:
+      yarn_queues = str(capacitySchedulerProperties["yarn.scheduler.capacity.root.queues"])
     # Interactive Queues property attributes
     putHiveServerPropertyAttribute = self.putPropertyAttribute(configurations, "hiveserver2-site")
     toProcessQueues = yarn_queues.split(",")

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
index 34e4cfa..b354378 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
@@ -943,31 +943,31 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
                                 "HAWQ Master or Standby Master cannot use the port 5432 when installed on the same host as the Ambari Server. Ambari Postgres DB uses the same port. Please choose a different value (e.g. 10432)")})
 
     # 2. Check if any data directories are pointing to root dir '/'
-    directories = {
-                    'hawq_master_directory': 'HAWQ Master directory',
-                    'hawq_master_temp_directory': 'HAWQ Master temp directory',
-                    'hawq_segment_directory': 'HAWQ Segment directory',
-                    'hawq_segment_temp_directory': 'HAWQ Segment temp directory'
-                  }
-    for property_name, display_name in directories.iteritems():
-      self.validateIfRootDir(properties, validationItems, property_name, display_name)
+    prop_name = 'hawq_master_directory'
+    display_name = 'HAWQ Master directory'
+    self.validateIfRootDir (properties, validationItems, prop_name, display_name)
+
+    prop_name = 'hawq_master_temp_directory'
+    display_name = 'HAWQ Master temp directory'
+    self.validateIfRootDir (properties, validationItems, prop_name, display_name)
+
+    prop_name = 'hawq_segment_directory'
+    display_name = 'HAWQ Segment directory'
+    self.validateIfRootDir (properties, validationItems, prop_name, display_name)
+
+    prop_name = 'hawq_segment_temp_directory'
+    display_name = 'HAWQ Segment temp directory'
+    self.validateIfRootDir (properties, validationItems, prop_name, display_name)
 
     # 3. Check YARN RM address properties
-    YARN = "YARN"
     servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
-    if YARN in servicesList and "yarn-site" in configurations:
+    if "YARN" in servicesList and "yarn-site" in configurations:
       yarn_site = getSiteProperties(configurations, "yarn-site")
       for hs_prop, ys_prop in self.getHAWQYARNPropertyMapping().items():
         if hs_prop in hawq_site and ys_prop in yarn_site and hawq_site[hs_prop] != yarn_site[ys_prop]:
           message = "Expected value: {0} (this property should have the same value as the property {1} in yarn-site)".format(yarn_site[ys_prop], ys_prop)
           validationItems.append({"config-name": hs_prop, "item": self.getWarnItem(message)})
 
-    # 4. Check HAWQ Resource Manager type
-    HAWQ_GLOBAL_RM_TYPE = "hawq_global_rm_type"
-    if YARN not in servicesList and HAWQ_GLOBAL_RM_TYPE in hawq_site and hawq_site[HAWQ_GLOBAL_RM_TYPE].upper() == YARN:
-      message = "{0} must be set to none if YARN service is not installed".format(HAWQ_GLOBAL_RM_TYPE)
-      validationItems.append({"config-name": HAWQ_GLOBAL_RM_TYPE, "item": self.getErrorItem(message)})
-
     return self.toConfigurationValidationProblems(validationItems, "hawq-site")
   
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/stacks/HDP/2.4/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HIVE/metainfo.xml
index 93728fd..ee973ed 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HIVE/metainfo.xml
@@ -26,7 +26,7 @@
             <name>HIVE_SERVER_INTERACTIVE</name>
             <displayName>HiveServer2 Interactive</displayName>
             <category>MASTER</category>
-            <cardinality>0+</cardinality>
+            <cardinality>1</cardinality>
             <versionAdvertised>true</versionAdvertised>
             <clientsToUpdateConfigs></clientsToUpdateConfigs>
             <dependencies>
@@ -35,7 +35,7 @@
                 <scope>cluster</scope>
                 <auto-deploy>
                   <enabled>true</enabled>
-                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
+                  <co-locate>HIVE/HIVE_SERVER</co-locate>
                 </auto-deploy>
               </dependency>
               <dependency>
@@ -43,15 +43,6 @@
                 <scope>host</scope>
                 <auto-deploy>
                   <enabled>true</enabled>
-                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
-                </auto-deploy>
-              </dependency>
-              <dependency>
-                <name>HDFS/HDFS_CLIENT</name>
-                <scope>host</scope>
-                <auto-deploy>
-                  <enabled>true</enabled>
-                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
                 </auto-deploy>
               </dependency>
               <dependency>
@@ -59,7 +50,6 @@
                 <scope>host</scope>
                 <auto-deploy>
                   <enabled>true</enabled>
-                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
                 </auto-deploy>
               </dependency>
               <dependency>
@@ -67,26 +57,9 @@
                 <scope>host</scope>
                 <auto-deploy>
                   <enabled>true</enabled>
-                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
-                </auto-deploy>
-              </dependency>
-              <dependency>
-                <name>PIG/PIG</name>
-                <scope>host</scope>
-                <auto-deploy>
-                  <enabled>true</enabled>
-                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
-                </auto-deploy>
-              </dependency>
-              <dependency>
-                <name>SLIDER/SLIDER</name>
-                <scope>host</scope>
-                <auto-deploy>
-                  <enabled>true</enabled>
-                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
                 </auto-deploy>
               </dependency>
-            </dependencies>
+              </dependencies>
                 <commandScript>
                   <script>scripts/hive_server_interactive.py</script>
                   <scriptType>PYTHON</scriptType>
@@ -97,14 +70,6 @@
                 </configuration-dependencies>
           </component>
         </components>
-      <requiredServices>
-        <service>ZOOKEEPER</service>
-        <service>HDFS</service>
-        <service>YARN</service>
-        <service>TEZ</service>
-        <service>PIG</service>
-        <service>SLIDER</service>
-      </requiredServices>
     </service>
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
index af6fb9b..bc4d397 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
@@ -607,8 +607,8 @@ public class TestActionScheduler {
     doAnswer(new Answer<List<HostRoleCommand>>() {
       @Override
       public List<HostRoleCommand> answer(InvocationOnMock invocation) throws Throwable {
-        String role = (String) invocation.getArguments()[0];
-        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[1];
+        String role = (String) invocation.getArguments()[1];
+        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[2];
 
         HostRoleCommand task = s.getHostRoleCommand(null, role);
 
@@ -618,7 +618,7 @@ public class TestActionScheduler {
           return Collections.emptyList();
         }
       }
-    }).when(db).getTasksByRoleAndStatus(anyString(), any(HostRoleStatus.class));
+    }).when(db).getTasksByHostRoleAndStatus(anyString(), anyString(), any(HostRoleStatus.class));
 
     ServerActionExecutor.init(injector);
     ActionScheduler scheduler = new ActionScheduler(100, 50, db, aq, fsm, 3,
@@ -762,8 +762,8 @@ public class TestActionScheduler {
     doAnswer(new Answer<List<HostRoleCommand>>() {
       @Override
       public List<HostRoleCommand> answer(InvocationOnMock invocation) throws Throwable {
-        String role = (String) invocation.getArguments()[0];
-        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[1];
+        String role = (String) invocation.getArguments()[1];
+        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[2];
 
         HostRoleCommand task = s.getHostRoleCommand(null, role);
 
@@ -774,7 +774,7 @@ public class TestActionScheduler {
         }
 
       }
-    }).when(db).getTasksByRoleAndStatus(anyString(), any(HostRoleStatus.class));
+    }).when(db).getTasksByHostRoleAndStatus(anyString(), anyString(), any(HostRoleStatus.class));
 
     ServerActionExecutor.init(injector);
     ActionScheduler scheduler = new ActionScheduler(100, 50, db, aq, fsm, 3,
@@ -843,8 +843,8 @@ public class TestActionScheduler {
     doAnswer(new Answer<List<HostRoleCommand>>() {
       @Override
       public List<HostRoleCommand> answer(InvocationOnMock invocation) throws Throwable {
-        String role = (String) invocation.getArguments()[0];
-        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[1];
+        String role = (String) invocation.getArguments()[1];
+        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[2];
 
         HostRoleCommand task = s.getHostRoleCommand(null, role);
 
@@ -854,7 +854,7 @@ public class TestActionScheduler {
           return Collections.emptyList();
         }
       }
-    }).when(db).getTasksByRoleAndStatus(anyString(), any(HostRoleStatus.class));
+    }).when(db).getTasksByHostRoleAndStatus(anyString(), anyString(), any(HostRoleStatus.class));
 
     ActionScheduler scheduler = new ActionScheduler(100, 50, db, aq, fsm, 3,
         new HostsMap((String) null), unitOfWork, null, conf);
@@ -1951,8 +1951,8 @@ public class TestActionScheduler {
     doAnswer(new Answer<List<HostRoleCommand>>() {
       @Override
       public List<HostRoleCommand> answer(InvocationOnMock invocation) throws Throwable {
-        String role = (String) invocation.getArguments()[0];
-        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[1];
+        String role = (String) invocation.getArguments()[1];
+        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[2];
 
         HostRoleCommand task = s.getHostRoleCommand(null, role);
 
@@ -1962,7 +1962,7 @@ public class TestActionScheduler {
           return Collections.emptyList();
         }
       }
-    }).when(db).getTasksByRoleAndStatus(anyString(), any(HostRoleStatus.class));
+    }).when(db).getTasksByHostRoleAndStatus(anyString(), anyString(), any(HostRoleStatus.class));
 
     doAnswer(new Answer<HostRoleCommand>() {
       @Override

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
index 6cb9e6f..510e1fb 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
@@ -36,7 +36,6 @@ import org.apache.ambari.server.agent.rest.AgentResource;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
 import org.apache.ambari.server.orm.DBAccessor;
-import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.security.SecurityHelperImpl;
 import org.apache.ambari.server.stack.StackManagerFactory;
@@ -309,7 +308,6 @@ public class AgentResourceTest extends RandomPortJerseyTest {
       bind(HeartBeatHandler.class).toInstance(handler);
       bind(AmbariMetaInfo.class).toInstance(ambariMetaInfo);
       bind(DBAccessor.class).toInstance(mock(DBAccessor.class));
-      bind(HostRoleCommandDAO.class).toInstance(mock(HostRoleCommandDAO.class));
     }
 
     private void installDependencies() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java b/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java
index 3ecb5aa..4e236f3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java
@@ -563,99 +563,4 @@ public class ConfigurationTest {
     Assert.assertEquals(44, configuration.getPropertyProvidersThreadPoolMaxSize());
   }
 
-
-  public void testGetHostRoleCommandStatusSummaryCacheSize() throws  Exception {
-    // Given
-    final Properties ambariProperties = new Properties();
-    final Configuration configuration = new Configuration(ambariProperties);
-    ambariProperties.setProperty(Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE, "3000");
-
-    // When
-    long actualCacheSize = configuration.getHostRoleCommandStatusSummaryCacheSize();
-
-    // Then
-    Assert.assertEquals(actualCacheSize, 3000L);
-  }
-
-  @Test
-  public void testGetHostRoleCommandStatusSummaryCacheSizeDefault() throws  Exception {
-    // Given
-    final Properties ambariProperties = new Properties();
-    final Configuration configuration = new Configuration(ambariProperties);
-
-    // When
-    long actualCacheSize = configuration.getHostRoleCommandStatusSummaryCacheSize();
-
-    // Then
-    Assert.assertEquals(actualCacheSize, Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE_DEFAULT);
-  }
-
-  @Test
-  public void testGetHostRoleCommandStatusSummaryCacheExpiryDuration() throws  Exception {
-    // Given
-    final Properties ambariProperties = new Properties();
-    final Configuration configuration = new Configuration(ambariProperties);
-    ambariProperties.setProperty(Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION, "60");
-
-    // When
-    long actualCacheExpiryDuration = configuration.getHostRoleCommandStatusSummaryCacheExpiryDuration();
-
-    // Then
-    Assert.assertEquals(actualCacheExpiryDuration, 60L);
-  }
-
-  @Test
-  public void testGetHostRoleCommandStatusSummaryCacheExpiryDurationDefault() throws  Exception {
-    // Given
-    final Properties ambariProperties = new Properties();
-    final Configuration configuration = new Configuration(ambariProperties);
-
-    // When
-    long actualCacheExpiryDuration = configuration.getHostRoleCommandStatusSummaryCacheExpiryDuration();
-
-    // Then
-    Assert.assertEquals(actualCacheExpiryDuration, Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_DEFAULT);
-  }
-
-  @Test
-  public void testGetHostRoleCommandStatusSummaryCacheEnabled() throws  Exception {
-    // Given
-    final Properties ambariProperties = new Properties();
-    final Configuration configuration = new Configuration(ambariProperties);
-    ambariProperties.setProperty(Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED, "true");
-
-    // When
-    boolean actualCacheEnabledConfig = configuration.getHostRoleCommandStatusSummaryCacheEnabled();
-
-    // Then
-    Assert.assertEquals(actualCacheEnabledConfig, true);
-  }
-
-  @Test
-  public void testGetHostRoleCommandStatusSummaryCacheDisabled() throws  Exception {
-    // Given
-    final Properties ambariProperties = new Properties();
-    final Configuration configuration = new Configuration(ambariProperties);
-    ambariProperties.setProperty(Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED, "false");
-
-    // When
-    boolean actualCacheEnabledConfig = configuration.getHostRoleCommandStatusSummaryCacheEnabled();
-
-    // Then
-    Assert.assertEquals(actualCacheEnabledConfig, false);
-  }
-
-  @Test
-  public void testGetHostRoleCommandStatusSummaryCacheEnabledDefault() throws  Exception {
-    // Given
-    final Properties ambariProperties = new Properties();
-    final Configuration configuration = new Configuration(ambariProperties);
-
-    // When
-    boolean actualCacheEnabledConfig = configuration.getHostRoleCommandStatusSummaryCacheEnabled();
-
-    // Then
-    Assert.assertEquals(actualCacheEnabledConfig, Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED_DEFAULT);
-  }
-
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 992150c..7b26f23 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -5679,7 +5679,6 @@ public class AmbariManagementControllerTest {
       clusters.getCluster(clusterName).getService(serviceName)
       .getServiceComponents().values()) {
       Assert.assertEquals(State.INSTALLED, sc.getDesiredState());
-      Assert.assertFalse(sc.isRecoveryEnabled()); // default value of recoveryEnabled
       for (ServiceComponentHost sch : sc.getServiceComponentHosts().values()) {
         Assert.assertEquals(State.INSTALLED, sch.getDesiredState());
         Assert.assertEquals(State.INIT, sch.getState());
@@ -5696,7 +5695,6 @@ public class AmbariManagementControllerTest {
     for (ServiceComponent sc :
       clusters.getCluster(clusterName).getService(serviceName)
           .getServiceComponents().values()) {
-      sc.setRecoveryEnabled(true);
       for (ServiceComponentHost sch : sc.getServiceComponentHosts().values()) {
         sch.setState(State.INSTALLED);
       }
@@ -5716,7 +5714,6 @@ public class AmbariManagementControllerTest {
       clusters.getCluster(clusterName).getService(serviceName)
           .getServiceComponents().values()) {
       Assert.assertEquals(State.INSTALLED, sc.getDesiredState());
-      Assert.assertTrue(sc.isRecoveryEnabled());
       for (ServiceComponentHost sch : sc.getServiceComponentHosts().values()) {
         Assert.assertEquals(State.INSTALLED, sch.getDesiredState());
         Assert.assertEquals(State.INSTALLED, sch.getState());

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
index f6027f3..2dcde00 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
@@ -42,7 +42,6 @@ import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.ResourceProvider;
 import org.apache.ambari.server.metadata.RoleCommandOrder;
 import org.apache.ambari.server.orm.DBAccessor;
-import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.security.credential.PrincipalKeyCredential;
 import org.apache.ambari.server.security.encryption.CredentialStoreService;
@@ -216,7 +215,6 @@ public class KerberosHelperTest extends EasyMockSupport {
         bind(CreatePrincipalsServerAction.class).toInstance(createMock(CreatePrincipalsServerAction.class));
         bind(CreateKeytabFilesServerAction.class).toInstance(createMock(CreateKeytabFilesServerAction.class));
         bind(StackAdvisorHelper.class).toInstance(createMock(StackAdvisorHelper.class));
-        bind(HostRoleCommandDAO.class).toInstance(createNiceMock(HostRoleCommandDAO.class));
       }
     });
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
index d24ca09..f38fab1 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
@@ -225,14 +225,11 @@ public class ComponentResourceProviderTest {
     expect(service.getServiceComponents()).andReturn(serviceComponentMap).anyTimes();
 
     expect(serviceComponent1.convertToResponse()).andReturn(
-      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component100", null, "", 1, 1, 0,
-              true /* recovery enabled */));
+      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component100", null, "", 1, 1, 0));
     expect(serviceComponent2.convertToResponse()).andReturn(
-      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component101", null, "", 1, 1, 0,
-              false /* recovery not enabled */));
+      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component101", null, "", 1, 1, 0));
     expect(serviceComponent3.convertToResponse()).andReturn(
-      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component102", null, "", 1, 1, 0,
-              true /* recovery enabled */));
+      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component102", null, "", 1, 1, 0));
 
     expect(ambariMetaInfo.getComponent((String) anyObject(),
         (String) anyObject(), (String) anyObject(), (String) anyObject()))
@@ -261,7 +258,6 @@ public class ComponentResourceProviderTest {
     propertyIds.add(ComponentResourceProvider.COMPONENT_TOTAL_COUNT_PROPERTY_ID);
     propertyIds.add(ComponentResourceProvider.COMPONENT_STARTED_COUNT_PROPERTY_ID);
     propertyIds.add(ComponentResourceProvider.COMPONENT_INSTALLED_COUNT_PROPERTY_ID);
-    propertyIds.add(ComponentResourceProvider.COMPONENT_RECOVERY_ENABLED_ID);
 
     Predicate predicate = new PredicateBuilder()
       .property(ComponentResourceProvider.COMPONENT_CLUSTER_NAME_PROPERTY_ID)
@@ -286,8 +282,6 @@ public class ComponentResourceProviderTest {
         ComponentResourceProvider.COMPONENT_STARTED_COUNT_PROPERTY_ID));
       Assert.assertEquals(0, resource.getPropertyValue(
         ComponentResourceProvider.COMPONENT_INSTALLED_COUNT_PROPERTY_ID));
-      Assert.assertEquals(String.valueOf(true), resource.getPropertyValue(
-        ComponentResourceProvider.COMPONENT_RECOVERY_ENABLED_ID));
     }
 
     // verify
@@ -370,14 +364,11 @@ public class ComponentResourceProviderTest {
     expect(component3Info.getCategory()).andReturn(null);
 
     expect(serviceComponent1.convertToResponse()).andReturn(
-      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component101", null, "", 1, 0, 1,
-              false /* recovery not enabled */));
+      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component101", null, "", 1, 0, 1));
     expect(serviceComponent2.convertToResponse()).andReturn(
-      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component102", null, "", 1, 0, 1,
-              false /* recovery not enabled */));
+      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component102", null, "", 1, 0, 1));
     expect(serviceComponent3.convertToResponse()).andReturn(
-      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component103", null, "", 1, 0, 1,
-              false /* recovery not enabled */));
+      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component103", null, "", 1, 0, 1));
     expect(serviceComponent1.getDesiredState()).andReturn(State.INSTALLED).anyTimes();
     expect(serviceComponent2.getDesiredState()).andReturn(State.INSTALLED).anyTimes();
     expect(serviceComponent3.getDesiredState()).andReturn(State.INSTALLED).anyTimes();
@@ -421,7 +412,6 @@ public class ComponentResourceProviderTest {
 
     Map<String, Object> properties = new LinkedHashMap<String, Object>();
 
-    properties.put(ComponentResourceProvider.COMPONENT_RECOVERY_ENABLED_ID, String.valueOf(true) /* recovery enabled */);
     properties.put(ComponentResourceProvider.COMPONENT_STATE_PROPERTY_ID, "STARTED");
     properties.put(ComponentResourceProvider.COMPONENT_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
 
@@ -617,7 +607,7 @@ public class ComponentResourceProviderTest {
 
     // requests
     ServiceComponentRequest request1 = new ServiceComponentRequest("cluster1", "service1", "component1",
-        null, String.valueOf(true /* recovery enabled */));
+        null);
 
     Set<ServiceComponentRequest> setRequests = new HashSet<ServiceComponentRequest>();
     setRequests.add(request1);
@@ -677,15 +667,14 @@ public class ComponentResourceProviderTest {
 
     // requests
     ServiceComponentRequest request1 = new ServiceComponentRequest("cluster1", "service1", "component1",
-        null, String.valueOf(true /* recovery enabled */));
+        null);
     ServiceComponentRequest request2 = new ServiceComponentRequest("cluster1", "service1", "component2",
-        null, String.valueOf(true /* recovery enabled */));
+        null);
     ServiceComponentRequest request3 = new ServiceComponentRequest("cluster1", "service1", "component3",
-        null, String.valueOf(true /* recovery enabled */));
+        null);
     ServiceComponentRequest request4 = new ServiceComponentRequest("cluster1", "service1", "component4",
-        null, String.valueOf(true /* recovery enabled */));
-    ServiceComponentRequest request5 = new ServiceComponentRequest("cluster1", "service2", null, null,
-              String.valueOf(true /* recovery enabled */));
+        null);
+    ServiceComponentRequest request5 = new ServiceComponentRequest("cluster1", "service2", null, null);
 
     Set<ServiceComponentRequest> setRequests = new HashSet<ServiceComponentRequest>();
     setRequests.add(request1);
@@ -769,7 +758,7 @@ public class ComponentResourceProviderTest {
 
     // requests
     ServiceComponentRequest request1 = new ServiceComponentRequest("cluster1", "service1", "component1",
-        null, String.valueOf(true /* recovery enabled */));
+        null);
 
     Set<ServiceComponentRequest> setRequests = new HashSet<ServiceComponentRequest>();
     setRequests.add(request1);

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java
index d7a15e2..455652b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java
@@ -31,7 +31,6 @@ import static org.junit.Assert.assertTrue;
 import java.io.File;
 import java.io.FileReader;
 import java.lang.reflect.Type;
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -63,7 +62,6 @@ import org.apache.commons.lang.StringUtils;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
-import org.springframework.util.Assert;
 
 /**
  * StackManager unit tests.
@@ -643,16 +641,12 @@ public class StackManagerTest {
         stack.getKerberosDescriptorFileLocation());
   }
 
+  @Ignore
   @Test
   public void testMetricsLoaded() throws Exception {
 
-    URL rootDirectoryURL = StackManagerTest.class.getResource("/");
-    Assert.notNull(rootDirectoryURL);
-
-    File resourcesDirectory = new File(new File(rootDirectoryURL.getFile()).getParentFile().getParentFile(), "src/main/resources");
-
-    File stackRoot = new File(resourcesDirectory, "stacks");
-    File commonServices = new File(resourcesDirectory, "common-services");
+    String stackRoot = ClassLoader.getSystemClassLoader().getResource("stacks").getPath().replace("test-classes","classes");
+    String commonServices = ClassLoader.getSystemClassLoader().getResource("common-services").getPath().replace("test-classes","classes");
 
     MetainfoDAO metaInfoDao = createNiceMock(MetainfoDAO.class);
     StackDAO stackDao = createNiceMock(StackDAO.class);
@@ -666,7 +660,7 @@ public class StackManagerTest {
 
     OsFamily osFamily = new OsFamily(config);
 
-    StackManager stackManager = new StackManager(stackRoot, commonServices,
+    StackManager stackManager = new StackManager(new File(stackRoot), new File(commonServices),
             osFamily, metaInfoDao, actionMetadata, stackDao);
 
     for (StackInfo stackInfo : stackManager.getStacks()) {
@@ -688,15 +682,12 @@ public class StackManagerTest {
     }
   }
 
+  @Ignore
   @Test
   public void testServicesWithRangerPluginRoleCommandOrder() throws AmbariException {
-    URL rootDirectoryURL = StackManagerTest.class.getResource("/");
-    Assert.notNull(rootDirectoryURL);
-
-    File resourcesDirectory = new File(new File(rootDirectoryURL.getFile()).getParentFile().getParentFile(), "src/main/resources");
-
-    File stackRoot = new File(resourcesDirectory, "stacks");
-    File commonServices = new File(resourcesDirectory, "common-services");
+    // Given
+    String stackRoot = ClassLoader.getSystemClassLoader().getResource("stacks").getPath().replace("test-classes","classes");
+    String commonServices = ClassLoader.getSystemClassLoader().getResource("common-services").getPath().replace("test-classes","classes");
 
     MetainfoDAO metaInfoDao = createNiceMock(MetainfoDAO.class);
     StackDAO stackDao = createNiceMock(StackDAO.class);
@@ -710,7 +701,7 @@ public class StackManagerTest {
 
     OsFamily osFamily = new OsFamily(config);
 
-    StackManager stackManager = new StackManager(stackRoot, commonServices, osFamily, metaInfoDao, actionMetadata, stackDao);
+    StackManager stackManager = new StackManager(new File(stackRoot), new File(commonServices), osFamily, metaInfoDao, actionMetadata, stackDao);
 
     String rangerUserSyncRoleCommand = Role.RANGER_USERSYNC + "-" + RoleCommand.START;
     String rangerAdminRoleCommand = Role.RANGER_ADMIN + "-" + RoleCommand.START;
@@ -792,6 +783,14 @@ public class StackManagerTest {
 
     assertTrue(rangerUserSyncRoleCommand + " should be dependent of " + rangerAdminRoleCommand, rangerUserSyncBlockers.contains(rangerAdminRoleCommand));
     assertTrue(rangerUserSyncRoleCommand + " should be dependent of " + kmsRoleCommand, rangerUserSyncBlockers.contains(kmsRoleCommand));
+
+    // Zookeeper Server
+    ArrayList<String> zookeeperBlockers = (ArrayList<String>)generalDeps.get(zookeeperServerRoleCommand);
+
+    assertTrue(zookeeperServerRoleCommand + " should be dependent of " + rangerUserSyncRoleCommand, zookeeperBlockers.contains(rangerUserSyncRoleCommand));
+
   }
+
+
   //todo: component override assertions
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
index 9fe0fc3..98424b7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
@@ -48,7 +48,6 @@ import org.apache.ambari.server.controller.spi.ClusterController;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
-import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.security.TestAuthenticationFactory;
 import org.apache.ambari.server.stack.StackManagerFactory;
@@ -755,7 +754,6 @@ public class ConfigHelperTest {
           bind(Clusters.class).toInstance(createNiceMock(ClustersImpl.class));
           bind(ClusterController.class).toInstance(clusterController);
           bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));
-          bind(HostRoleCommandDAO.class).toInstance(createNiceMock(HostRoleCommandDAO.class));
         }
       });
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
index 077df33..6061e06 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
@@ -19,12 +19,12 @@
 package org.apache.ambari.server.upgrade;
 
 
-import java.lang.reflect.Method;
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.persistence.EntityManager;
-
+import com.google.common.collect.Maps;
+import com.google.gson.Gson;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Provider;
+import com.google.inject.persist.PersistService;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.controller.AmbariManagementController;
@@ -33,7 +33,6 @@ import org.apache.ambari.server.controller.ConfigurationRequest;
 import org.apache.ambari.server.controller.ConfigurationResponse;
 import org.apache.ambari.server.controller.KerberosHelper;
 import org.apache.ambari.server.controller.MaintenanceStateHelper;
-import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.dao.StackDAO;
@@ -41,7 +40,6 @@ import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.stack.OsFamily;
 import org.easymock.Capture;
 import org.easymock.EasyMock;
 import org.easymock.EasyMockSupport;
@@ -49,14 +47,10 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.google.common.collect.Maps;
-import com.google.gson.Gson;
-import com.google.inject.Binder;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.Module;
-import com.google.inject.Provider;
-import com.google.inject.persist.PersistService;
+import javax.persistence.EntityManager;
+import java.lang.reflect.Method;
+import java.util.HashMap;
+import java.util.Map;
 
 import static org.easymock.EasyMock.anyObject;
 import static org.easymock.EasyMock.anyString;
@@ -64,7 +58,6 @@ import static org.easymock.EasyMock.capture;
 import static org.easymock.EasyMock.createMockBuilder;
 import static org.easymock.EasyMock.createNiceMock;
 import static org.easymock.EasyMock.createStrictMock;
-import static org.easymock.EasyMock.eq;
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.expectLastCall;
 import static org.easymock.EasyMock.replay;
@@ -108,7 +101,6 @@ public class UpgradeCatalog222Test {
     Method updateAlerts = UpgradeCatalog222.class.getDeclaredMethod("updateAlerts");
     Method updateStormConfigs = UpgradeCatalog222.class.getDeclaredMethod("updateStormConfigs");
     Method updateAMSConfigs = UpgradeCatalog222.class.getDeclaredMethod("updateAMSConfigs");
-    Method updateHostRoleCommands = UpgradeCatalog222.class.getDeclaredMethod("updateHostRoleCommands");
 
 
     UpgradeCatalog222 upgradeCatalog222 = createMockBuilder(UpgradeCatalog222.class)
@@ -116,7 +108,6 @@ public class UpgradeCatalog222Test {
             .addMockedMethod(updateAlerts)
             .addMockedMethod(updateStormConfigs)
             .addMockedMethod(updateAMSConfigs)
-            .addMockedMethod(updateHostRoleCommands)
             .createMock();
 
     upgradeCatalog222.addNewConfigurationsFromXml();
@@ -127,8 +118,6 @@ public class UpgradeCatalog222Test {
     expectLastCall().once();
     upgradeCatalog222.updateAMSConfigs();
     expectLastCall().once();
-    upgradeCatalog222.updateHostRoleCommands();
-    expectLastCall().once();
 
     replay(upgradeCatalog222);
 
@@ -214,28 +203,4 @@ public class UpgradeCatalog222Test {
 
   }
 
-  @Test
-  public void testUpdateHostRoleCommands() throws Exception {
-    final DBAccessor dbAccessor = createNiceMock(DBAccessor.class);
-    dbAccessor.createIndex(eq("idx_hrc_status"), eq("host_role_command"), eq("status"), eq("role"));
-    expectLastCall().once();
-
-    replay(dbAccessor);
-
-    Module module = new Module() {
-      @Override
-      public void configure(Binder binder) {
-        binder.bind(DBAccessor.class).toInstance(dbAccessor);
-        binder.bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
-      }
-    };
-
-    Injector injector = Guice.createInjector(module);
-    UpgradeCatalog222 upgradeCatalog222 = injector.getInstance(UpgradeCatalog222.class);
-    upgradeCatalog222.updateHostRoleCommands();
-
-
-    verify(dbAccessor);
-  }
-
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
index f5fafbc..95ae8d8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
@@ -103,7 +103,6 @@ public class UpgradeCatalog240Test {
   @Test
   public void testExecuteDDLUpdates() throws SQLException, AmbariException {
     Capture<DBAccessor.DBColumnInfo> capturedColumnInfo = newCapture();
-    Capture<DBAccessor.DBColumnInfo> capturedScColumnInfo = newCapture();
     final DBAccessor dbAccessor = createStrictMock(DBAccessor.class);
     Configuration configuration = createNiceMock(Configuration.class);
     Connection connection = createNiceMock(Connection.class);
@@ -112,8 +111,6 @@ public class UpgradeCatalog240Test {
     Capture<List<DBAccessor.DBColumnInfo>> capturedSettingColumns = EasyMock.newCapture();
 
     dbAccessor.addColumn(eq("adminpermission"), capture(capturedColumnInfo));
-    dbAccessor.addColumn(eq(UpgradeCatalog240.SERVICE_COMPONENT_DESIRED_STATE_TABLE), capture(capturedScColumnInfo));
-
     dbAccessor.createTable(eq("setting"), capture(capturedSettingColumns), eq("id"));
     expect(configuration.getDatabaseUrl()).andReturn(Configuration.JDBC_IN_MEMORY_URL).anyTimes();
     expect(dbAccessor.getConnection()).andReturn(connection);
@@ -179,15 +176,6 @@ public class UpgradeCatalog240Test {
     Assert.assertEquals(1, columnInfo.getDefaultValue());
     Assert.assertEquals(false, columnInfo.isNullable());
 
-    // Verify if recovery_enabled column was added to servicecomponentdesiredstate table
-    DBAccessor.DBColumnInfo columnScInfo = capturedScColumnInfo.getValue();
-    Assert.assertNotNull(columnScInfo);
-    Assert.assertEquals(UpgradeCatalog240.RECOVERY_ENABLED_COL, columnScInfo.getName());
-    Assert.assertEquals(null, columnScInfo.getLength());
-    Assert.assertEquals(Short.class, columnScInfo.getType());
-    Assert.assertEquals(0, columnScInfo.getDefaultValue());
-    Assert.assertEquals(false, columnScInfo.isNullable());
-
     Map<String, Class> expectedCaptures = new HashMap<>();
     expectedCaptures.put("id", Long.class);
     expectedCaptures.put("name", String.class);

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java b/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
index 215d137..854263c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
@@ -37,7 +37,6 @@ import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.dao.HostDAO;
-import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.security.encryption.CredentialStoreService;
 import org.apache.ambari.server.stack.StackManagerFactory;
@@ -119,7 +118,6 @@ public class StageUtilsTest extends EasyMockSupport {
         bind(HostRoleCommandFactory.class).to(HostRoleCommandFactoryImpl.class);
         bind(HostDAO.class).toInstance(createNiceMock(HostDAO.class));
         bind(PersistedState.class).toInstance(createNiceMock(PersistedState.class));
-        bind(HostRoleCommandDAO.class).toInstance(createNiceMock(HostRoleCommandDAO.class));
       }
     });
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
index e15582e..7c578f2 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
@@ -1820,7 +1820,7 @@ class TestHDP206StackAdvisor(TestCase):
     )
     recommendedDefaults = {"property1": "file:///grid/0/var/dir"}
     warn = self.stackAdvisor.validatorNotRootFs(properties, recommendedDefaults, 'property1', hostInfo)
-    self.assertTrue(warn != None)
+    self.assertIsNotNone(warn)
     self.assertEquals({'message': 'It is not recommended to use root partition for property1', 'level': 'WARN'}, warn)
 
     # Set by user /var mountpoint, which is non-root , but not preferable - no warning
@@ -1831,7 +1831,7 @@ class TestHDP206StackAdvisor(TestCase):
         "mountpoint" : "/var"
       }
     )
-    self.assertTrue(self.stackAdvisor.validatorNotRootFs(properties, recommendedDefaults, 'property1', hostInfo) == None)
+    self.assertIsNone(self.stackAdvisor.validatorNotRootFs(properties, recommendedDefaults, 'property1', hostInfo))
 
   def test_validatorEnoughDiskSpace(self):
     reqiuredDiskSpace = 1048576
@@ -1847,7 +1847,7 @@ class TestHDP206StackAdvisor(TestCase):
       }
     ]}
     properties = {"property1": "file:///var/dir"}
-    self.assertTrue(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace) == None)
+    self.assertIsNone(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace))
 
     # local FS, no enough space
     hostInfo = {"disk_info": [
@@ -1858,16 +1858,16 @@ class TestHDP206StackAdvisor(TestCase):
       }
     ]}
     warn = self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace)
-    self.assertTrue(warn != None)
+    self.assertIsNotNone(warn)
     self.assertEquals({'message': errorMsg, 'level': 'WARN'}, warn)
 
     # non-local FS, HDFS
     properties = {"property1": "hdfs://h1"}
-    self.assertTrue(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace) == None)
+    self.assertIsNone(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace))
 
     # non-local FS, WASB
     properties = {"property1": "wasb://h1"}
-    self.assertTrue(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace) == None)
+    self.assertIsNone(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace))
 
   def test_round_to_n(self):
     self.assertEquals(self.stack_advisor_impl.round_to_n(0), 0)

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
index d230030..14a28d3 100644
--- a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
@@ -1028,7 +1028,7 @@ class TestHDP22StackAdvisor(TestCase):
       'hive-site': {
         'properties': {
           'hive.server2.enable.doAs': 'true',
-          'hive.server2.tez.default.queues': "queue1,queue2",
+          'hive.server2.tez.default.queues': "default",
           'hive.server2.tez.initialize.default.sessions': 'false',
           'hive.server2.tez.sessions.per.default.queue': '1',
           'hive.auto.convert.join.noconditionaltask.size': '268435456',
@@ -1073,16 +1073,7 @@ class TestHDP22StackAdvisor(TestCase):
          'hive.server2.authentication.kerberos.keytab': {'delete': 'true'},
          'hive.server2.authentication.ldap.url': {'delete': 'true'},
          'hive.server2.tez.default.queues': {
-           "entries": [
-             {
-               "value": "queue1",
-               "label": "queue1 queue"
-             },
-             {
-               "value": "queue2",
-               "label": "queue2 queue"
-             }
-           ]
+           'entries': [{'value': 'default', 'label': 'default queue'}]
           }
         }
       },
@@ -2061,7 +2052,6 @@ class TestHDP22StackAdvisor(TestCase):
           "timeline.metrics.cluster.aggregate.splitpoints": " ",
           "timeline.metrics.host.aggregate.splitpoints": " ",
           "timeline.metrics.host.aggregator.ttl": "1",
-          "timeline.metrics.service.handler.thread.count": "20",
           'timeline.metrics.service.watcher.disabled': 'false'
         }
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
index 03ae6cc..545a2b5 100644
--- a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
@@ -1630,7 +1630,6 @@ class TestHDP23StackAdvisor(TestCase):
   def test_validateHAWQConfigurations(self):
     services = self.load_json("services-hawq-3-hosts.json")
     # setup default configuration values
-    # Test hawq_rm_yarn_address and hawq_rm_scheduler_address are set correctly
     configurations = services["configurations"]
     configurations["hawq-site"] = {"properties": {"hawq_rm_yarn_address": "localhost:8032",
                                                   "hawq_rm_yarn_scheduler_address": "localhost:8030"}}
@@ -1665,48 +1664,3 @@ class TestHDP23StackAdvisor(TestCase):
     self.assertEqual(len(problems), 2)
     self.assertEqual(problems_dict, expected_warnings)
 
-    # Test hawq_global_rm_type validation
-    services = {
-                 "services" : [
-                   {
-                     "StackServices" : {
-                     "service_name" : "HAWQ"
-                     },
-                     "components": []
-                   } ],
-                 "configurations":
-                   {
-                     "hawq-site": {
-                       "properties": {
-                         "hawq_global_rm_type": "yarn"
-                       }
-                     }
-                   }
-                }
-    properties = services["configurations"]["hawq-site"]["properties"]
-
-    # case 1: hawq_global_rm_type is set as yarn, but YARN service is not installed. Validation error expected.
-    """
-    Validation error expected is as below:
-                    [ {
-                          "config-type": "hawq-site",
-                          "message": "hawq_global_rm_type must be set to none if YARN service is not installed",
-                          "type": "configuration",
-                          "config-name": "hawq_global_rm_type",
-                          "level": "ERROR"
-                    } ]
-    """
-    problems = self.stackAdvisor.validateHAWQConfigurations(properties, defaults, services["configurations"], services, hosts)
-    self.assertEqual(len(problems), 1)
-    self.assertEqual(problems[0]["config-type"], "hawq-site")
-    self.assertEqual(problems[0]["message"], "hawq_global_rm_type must be set to none if YARN service is not installed")
-    self.assertEqual(problems[0]["type"], "configuration")
-    self.assertEqual(problems[0]["config-name"], "hawq_global_rm_type")
-    self.assertEqual(problems[0]["level"], "ERROR")
-
-
-    # case 2: hawq_global_rm_type is set as yarn, and YARN service is installed. No validation errors expected.
-    services["services"].append({"StackServices" : {"service_name" : "YARN"}, "components":[]})
-
-    problems = self.stackAdvisor.validateHAWQConfigurations(properties, defaults, services["configurations"], services, hosts)
-    self.assertEqual(len(problems), 0)

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-web/app/assets/test/tests.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/test/tests.js b/ambari-web/app/assets/test/tests.js
index 4edcc5e..06c4c31 100644
--- a/ambari-web/app/assets/test/tests.js
+++ b/ambari-web/app/assets/test/tests.js
@@ -239,7 +239,6 @@ var files = [
   'test/views/main/dashboard/widgets/uptime_text_widget_test',
   'test/views/main/dashboard/widgets/node_managers_live_test',
   'test/views/main/dashboard/widgets/datanode_live_test',
-  'test/views/main/dashboard/widgets/hawqsegment_live_test',
   'test/views/main/dashboard/widgets/hbase_average_load_test',
   'test/views/main/dashboard/widgets/hbase_regions_in_transition_test',
   'test/views/main/dashboard/widgets/namenode_rpc_test',

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-web/app/controllers/main/admin/highAvailability/hawq/addStandby/step3_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/highAvailability/hawq/addStandby/step3_controller.js b/ambari-web/app/controllers/main/admin/highAvailability/hawq/addStandby/step3_controller.js
index fd28ad5..513a519 100644
--- a/ambari-web/app/controllers/main/admin/highAvailability/hawq/addStandby/step3_controller.js
+++ b/ambari-web/app/controllers/main/admin/highAvailability/hawq/addStandby/step3_controller.js
@@ -141,7 +141,7 @@ App.AddHawqStandbyWizardStep3Controller = Em.Controller.extend({
   submit: function () {
     if (!this.get('isSubmitDisabled')) {
       dataDir = this.get('hawqProps').items[0].properties['hawq_master_directory'];
-      hawqStandby = this.get('content.hawqHosts.newHawqStandby');
+      hawqStandby = this.get('hawqProps').items[0].properties['hawq_standby_address_host']
       App.showConfirmationPopup(
         function() {
           App.get('router.mainAdminKerberosController').getKDCSessionState(function() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-web/app/mappers/components_state_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/components_state_mapper.js b/ambari-web/app/mappers/components_state_mapper.js
index ac3e1b5..0f2b627 100644
--- a/ambari-web/app/mappers/components_state_mapper.js
+++ b/ambari-web/app/mappers/components_state_mapper.js
@@ -59,11 +59,6 @@ App.componentsStateMapper = App.QuickDataMapper.create({
       node_managers_installed: 'INSTALLED_PATH',
       node_managers_total: 'TOTAL_PATH'
     },
-    'HAWQSEGMENT': {
-      hawq_segments_started: 'STARTED_PATH',
-      hawq_segments_installed: 'INSTALLED_PATH',
-      hawq_segments_total: 'TOTAL_PATH'
-    },
     'HBASE_REGIONSERVER': {
       region_servers_started: 'STARTED_PATH',
       region_servers_installed: 'INSTALLED_PATH',

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 95f87f8..1246a5c 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -2532,7 +2532,6 @@ Em.I18n.translations = {
   'dashboard.widgets.YARNLinks': 'YARN Links',
   'dashboard.widgets.error.invalid': 'Invalid! Enter a number between 0 - {0}',
   'dashboard.widgets.error.smaller': 'Threshold 1 should be smaller than threshold 2!',
-  'dashboard.widgets.HawqSegmentUp': 'HAWQ Segments Live',
 
   'dashboard': {
     'widgets': {
@@ -2637,10 +2636,6 @@ Em.I18n.translations = {
   'dashboard.services.hbase.masterStarted':'Master Started',
   'dashboard.services.hbase.masterActivated':'Master Activated',
 
-  'dashboard.services.hawq.segments.started':'started',
-  'dashboard.services.hawq.segments.stopped':'stopped',
-  'dashboard.services.hawq.segments.total':'in total',
-
   'dashboard.services.hive.clients':'Hive Clients',
   'dashboard.services.hive.client':'Hive Client',
   'dashboard.services.hive.metastore':'Hive Metastore',

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-web/app/models/alerts/alert_definition.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/alerts/alert_definition.js b/ambari-web/app/models/alerts/alert_definition.js
index 6c25f7e..e91bd4f 100644
--- a/ambari-web/app/models/alerts/alert_definition.js
+++ b/ambari-web/app/models/alerts/alert_definition.js
@@ -41,8 +41,8 @@ App.AlertDefinition = DS.Model.extend({
   groups: DS.hasMany('App.AlertGroup'),
   reporting: DS.hasMany('App.AlertReportDefinition'),
   parameters: DS.hasMany('App.AlertDefinitionParameter'),
-  lastTriggered: 0,
-  lastTriggeredRaw: 0,
+  lastTriggered: DS.attr('number'),
+  lastTriggeredRaw: DS.attr('number'),
 
   //relates only to SCRIPT-type alert definition
   location: DS.attr('string'),

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-web/app/views.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views.js b/ambari-web/app/views.js
index 18b43a8..4b6b857 100644
--- a/ambari-web/app/views.js
+++ b/ambari-web/app/views.js
@@ -220,7 +220,6 @@ require('views/main/dashboard/widgets/namenode_heap');
 require('views/main/dashboard/widgets/namenode_cpu');
 require('views/main/dashboard/widgets/hdfs_capacity');
 require('views/main/dashboard/widgets/datanode_live');
-require('views/main/dashboard/widgets/hawqsegment_live');
 require('views/main/dashboard/widgets/namenode_rpc');
 require('views/main/dashboard/widgets/metrics_memory');
 require('views/main/dashboard/widgets/metrics_network');

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-web/app/views/common/configs/widgets/list_config_widget_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/configs/widgets/list_config_widget_view.js b/ambari-web/app/views/common/configs/widgets/list_config_widget_view.js
index 568c405..fcf2aac 100644
--- a/ambari-web/app/views/common/configs/widgets/list_config_widget_view.js
+++ b/ambari-web/app/views/common/configs/widgets/list_config_widget_view.js
@@ -288,15 +288,16 @@ App.ListConfigWidgetView = App.ConfigWidgetView.extend({
   },
 
   isOptionExist: function(value) {
-    var isExist = true;
-    if (Em.isNone(value)) {
-      return !isExist;
-    } else {
+    var isExist = false;
+    if (value !== null && value !== undefined) {
       value = Em.typeOf(value) == 'string' ? value.split(',') : value;
       value.forEach(function(item) {
-        isExist = isExist && this.get('options').mapProperty('value').contains(item);
+        isExist = this.get('options').mapProperty('value').contains(item);
       }, this);
       return isExist;
+    } else {
+      return false;
     }
   }
+
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-web/app/views/main/dashboard/widgets.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/widgets.js b/ambari-web/app/views/main/dashboard/widgets.js
index 8a86af6..a2fb281 100644
--- a/ambari-web/app/views/main/dashboard/widgets.js
+++ b/ambari-web/app/views/main/dashboard/widgets.js
@@ -128,8 +128,7 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
       '13', '12', '14', '16', //hbase
       '17', '18', '19', '20', '23', // all yarn
       '21', // storm
-      '22', // flume
-      '24' // hawq
+      '22' // flume
     ]; // all in order
     var hiddenFull = [
       ['15', 'Region In Transition']
@@ -174,12 +173,6 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
         visibleFull = visibleFull.without(item);
       }, this);
     }
-    if (this.get('hawq_model') == null) {
-      var hawq = ['24'];
-      hawq.forEach(function (item) {
-        visibleFull = visibleFull.without(item);
-      }, this);
-    }
     var obj = this.get('initPrefObject');
     obj.set('visible', visibleFull);
     obj.set('hidden', hiddenFull);
@@ -199,8 +192,6 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
 
   flume_model: null,
 
-  hawq_model: null,
-
   /**
    * List of visible widgets
    * @type {Ember.Enumerable}
@@ -392,8 +383,7 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
       hbase_model: ['12', '13', '14', '15', '16'],
       yarn_model: ['17', '18', '19', '20', '23'],
       storm_model: ['21'],
-      flume_model: ['22'],
-      hawq_model: ['24']
+      flume_model: ['22']
     };
 
     // check each service, find out the newly added service and already deleted service
@@ -460,8 +450,7 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
       '20': App.YARNMemoryPieChartView,
       '21': App.SuperVisorUpView,
       '22': App.FlumeAgentUpView,
-      '23': App.YARNLinksView,
-      '24': App.HawqSegmentUpView
+      '23': App.YARNLinksView
     }, id);
   },
 
@@ -478,7 +467,7 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
     visible: [],
     hidden: [],
     threshold: {1: [80, 90], 2: [85, 95], 3: [90, 95], 4: [80, 90], 5: [1000, 3000], 6: [], 7: [], 8: [], 9: [], 10: [], 11: [], 12: [], 13: [70, 90], 14: [150, 250], 15: [3, 10], 16: [],
-      17: [70, 90], 18: [], 19: [50, 75], 20: [50, 75], 21: [85, 95], 22: [85, 95], 23: [], 24: [80, 90]} // id:[thresh1, thresh2]
+      17: [70, 90], 18: [], 19: [50, 75], 20: [50, 75], 21: [85, 95], 22: [85, 95], 23: []} // id:[thresh1, thresh2]
   }),
 
   /**
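
The widgets.js hunks above remove widget id '24' from four coordinated places: the default visibleFull ordering, the hawq_model service grouping, the id-to-view-class mapping, and the default threshold table. A minimal standalone sketch of the pruning pattern the removed hawq_model block followed; the function and argument names below are illustrative, not Ambari API:

    // Plain-JavaScript approximation of the removed block: when a service
    // model is absent, its widget ids are dropped from the default visible list.
    function pruneWidgets(visibleFull, serviceModels, serviceWidgetIds) {
      Object.keys(serviceWidgetIds).forEach(function (modelName) {
        if (serviceModels[modelName] == null) {
          serviceWidgetIds[modelName].forEach(function (id) {
            visibleFull = visibleFull.filter(function (v) { return v !== id; });
          });
        }
      });
      return visibleFull;
    }

    // Example: HAWQ is not installed, so widget '24' falls out of the defaults.
    console.log(pruneWidgets(['21', '22', '24'],
                             { hawq_model: null },
                             { hawq_model: ['24'] })); // -> ['21', '22']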

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-web/app/views/main/dashboard/widgets/hawqsegment_live.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/widgets/hawqsegment_live.js b/ambari-web/app/views/main/dashboard/widgets/hawqsegment_live.js
deleted file mode 100644
index e8d0656..0000000
--- a/ambari-web/app/views/main/dashboard/widgets/hawqsegment_live.js
+++ /dev/null
@@ -1,190 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-var App = require('app');
-
-App.HawqSegmentUpView = App.TextDashboardWidgetView.extend({
-
-  title: Em.I18n.t('dashboard.widgets.HawqSegmentUp'),
-  id: '24',
-
-  isPieChart: false,
-  isText: true,
-  isProgressBar: false,
-  model_type: 'hawq',
-
-  hiddenInfo: function () {
-    var result = [];
-    result.pushObject(this.get('hawqSegmentsStarted') + ' ' + Em.I18n.t('dashboard.services.hawq.segments.started'));
-    result.pushObject(this.get('hawqSegmentsInstalled') + ' ' + Em.I18n.t('dashboard.services.hawq.segments.stopped'));
-    result.pushObject(this.get('hawqSegmentsTotal')+ ' ' + Em.I18n.t('dashboard.services.hawq.segments.total'));
-    return result;
-  }.property('hawqSegmentsStarted', 'hawqSegmentsInstalled', 'hawqSegmentsTotal'),
-  hiddenInfoClass: "hidden-info-three-line",
-
-  thresh1: 40,
-  thresh2: 70,
-  maxValue: 100,
-
-  hawqSegmentsStarted: function () {
-    if (Em.isNone(this.get('model.hawqSegmentsStarted'))) {
-      return Em.I18n.t('services.service.summary.notAvailable');
-    }
-    return this.get('model.hawqSegmentsStarted');
-  }.property('model.hawqSegmentsStarted'),
-
-  hawqSegmentsInstalled: function () {
-    if (Em.isNone(this.get('model.hawqSegmentsInstalled'))) {
-      return Em.I18n.t('services.service.summary.notAvailable');
-    }
-    return this.get('model.hawqSegmentsInstalled');
-  }.property('model.hawqSegmentsInstalled'),
-
-  hawqSegmentsTotal: function () {
-    if (Em.isNone(this.get('model.hawqSegmentsTotal'))) {
-      return Em.I18n.t('services.service.summary.notAvailable');
-    }
-    return this.get('model.hawqSegmentsTotal');
-  }.property('model.hawqSegmentsTotal'),
-
-  data: function () {
-    if (Em.isNone(this.get('model.hawqSegmentsStarted')) || Em.isNone(this.get('model.hawqSegmentsTotal'))) {
-      return null;
-    } else {
-      return ((this.get('hawqSegmentsStarted') / this.get('model.hawqSegmentsTotal')).toFixed(2)) * 100;
-    }
-  }.property('model.hawqSegmentsTotal', 'hawqSegmentsStarted'),
-
-  content: function () {
-    if (Em.isNone(this.get('model.hawqSegmentsStarted')) || Em.isNone(this.get('model.hawqSegmentsTotal'))) {
-      return Em.I18n.t('services.service.summary.notAvailable');
-    } else {
-      return this.get('hawqSegmentsStarted') + "/" + this.get('model.hawqSegmentsTotal');
-    }
-  }.property('model.hawqSegmentsTotal', 'hawqSegmentsStarted'),
-
-  editWidget: function (event) {
-    var parent = this;
-    var max_tmp =  parseFloat(parent.get('maxValue'));
-    var configObj = Ember.Object.create({
-      thresh1: parent.get('thresh1') + '',
-      thresh2: parent.get('thresh2') + '',
-      hintInfo: Em.I18n.t('dashboard.widgets.hintInfo.hint1').format(max_tmp),
-      isThresh1Error: false,
-      isThresh2Error: false,
-      errorMessage1: "",
-      errorMessage2: "",
-      maxValue: max_tmp,
-      observeNewThresholdValue: function () {
-        var thresh1 = this.get('thresh1');
-        var thresh2 = this.get('thresh2');
-        if (thresh1.trim() != "") {
-          if (isNaN(thresh1) || thresh1 > max_tmp || thresh1 < 0){
-            this.set('isThresh1Error', true);
-            this.set('errorMessage1', 'Invalid! Enter a number between 0 - ' + max_tmp);
-          } else if ( this.get('isThresh2Error') === false && parseFloat(thresh2)<= parseFloat(thresh1)) {
-            this.set('isThresh1Error', true);
-            this.set('errorMessage1', 'Threshold 1 should be smaller than threshold 2 !');
-          } else {
-            this.set('isThresh1Error', false);
-            this.set('errorMessage1', '');
-          }
-        } else {
-          this.set('isThresh1Error', true);
-          this.set('errorMessage1', 'This is required');
-        }
-
-        if (thresh2.trim() != "") {
-          if (isNaN(thresh2) || thresh2 > max_tmp || thresh2 < 0) {
-            this.set('isThresh2Error', true);
-            this.set('errorMessage2', 'Invalid! Enter a number between 0 - ' + max_tmp);
-          } else {
-            this.set('isThresh2Error', false);
-            this.set('errorMessage2', '');
-          }
-        } else {
-          this.set('isThresh2Error', true);
-          this.set('errorMessage2', 'This is required');
-        }
-
-        // update the slider handles and color
-        if (this.get('isThresh1Error') === false && this.get('isThresh2Error') === false) {
-          $("#slider-range").slider('values', 0 , parseFloat(thresh1));
-          $("#slider-range").slider('values', 1 , parseFloat(thresh2));
-        }
-      }.observes('thresh1', 'thresh2')
-
-    });
-
-    var browserVerion = this.getInternetExplorerVersion();
-    App.ModalPopup.show({
-      header: Em.I18n.t('dashboard.widgets.popupHeader'),
-      classNames: [ 'sixty-percent-width-modal-edit-widget'],
-      bodyClass: Ember.View.extend({
-        templateName: require('templates/main/dashboard/edit_widget_popup'),
-        configPropertyObj: configObj
-      }),
-      primary: Em.I18n.t('common.apply'),
-      onPrimary: function () {
-        configObj.observeNewThresholdValue();
-        if (!configObj.isThresh1Error && !configObj.isThresh2Error) {
-          parent.set('thresh1', parseFloat(configObj.get('thresh1')) );
-          parent.set('thresh2', parseFloat(configObj.get('thresh2')) );
-          if (!App.get('testMode')) {
-            var big_parent = parent.get('parentView');
-            big_parent.getUserPref(big_parent.get('persistKey'));
-            var oldValue = big_parent.get('currentPrefObject');
-            oldValue.threshold[parseInt(parent.id)] = [configObj.get('thresh1'), configObj.get('thresh2')];
-            big_parent.postUserPref(big_parent.get('persistKey'),oldValue);
-          }
-          this.hide();
-        }
-      },
-
-      didInsertElement: function () {
-        var handlers = [configObj.get('thresh1'), configObj.get('thresh2')];
-        var colors = [App.healthStatusRed, App.healthStatusOrange, App.healthStatusGreen]; //color red, orange, green
-
-        if (browserVerion == -1 || browserVerion > 9) {
-          configObj.set('isIE9', false);
-          configObj.set('isGreenOrangeRed', false);
-          $("#slider-range").slider({
-            range: true,
-            min: 0,
-            max: max_tmp,
-            values: handlers,
-            create: function (event, ui) {
-              parent.updateColors(handlers, colors);
-            },
-            slide: function (event, ui) {
-              parent.updateColors(ui.values, colors);
-              configObj.set('thresh1', ui.values[0] + '');
-              configObj.set('thresh2', ui.values[1] + '');
-            },
-            change: function (event, ui) {
-              parent.updateColors(ui.values, colors);
-            }
-          });
-        } else {
-          configObj.set('isIE9', true);
-          configObj.set('isGreenOrangeRed', false);
-        }
-      }
-    });
-  }
-});
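
For reference on the deleted view above: its data property reported the share of started HAWQ segments as a percentage (which presumably drove the red/orange/green state through the 40/70 thresholds), and its content property rendered the raw "started/total" pair. A minimal standalone sketch of that percentage calculation, with hypothetical inputs in place of the model bindings:

    // Plain-JavaScript approximation of the deleted "data" computed property.
    function hawqSegmentsUpPercent(started, total) {
      if (started == null || total == null) return null;  // mirrors the Em.isNone guard
      return ((started / total).toFixed(2)) * 100;        // toFixed() yields a string;
                                                          // the * 100 coerces it back
    }

    console.log(hawqSegmentsUpPercent(2, 4));    // 50
    console.log(hawqSegmentsUpPercent(null, 4)); // null -> widget falls back to the
                                                 // "not available" label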