Posted to commits@ambari.apache.org by nc...@apache.org on 2016/02/24 16:07:48 UTC

[01/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Repository: ambari
Updated Branches:
  refs/heads/trunk 6e002b25c -> db999ae82


Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f40df6b5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f40df6b5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f40df6b5

Branch: refs/heads/trunk
Commit: f40df6b57819f749993547c8e17e74493d354e84
Parents: a385f77 5164bc5
Author: Nate Cole <nc...@hortonworks.com>
Authored: Mon Jan 18 08:31:02 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Mon Jan 18 08:31:02 2016 -0500

----------------------------------------------------------------------
 .../core/providers/accounts.py                  | 20 +++++--
 .../core/resources/accounts.py                  |  6 +++
 .../python/resource_management/core/system.py   | 17 +-----
 .../python/resource_management/core/utils.py    | 16 ++++++
 .../STORM/0.9.1.2.1/package/scripts/storm.py    | 11 ++++
 .../0.9.1.2.1/package/scripts/ui_server.py      | 15 ++++++
 .../HDP/2.0.6/configuration/cluster-env.xml     | 13 +++++
 .../2.0.6/hooks/before-ANY/scripts/params.py    |  1 +
 .../before-ANY/scripts/shared_initialization.py |  6 +--
 .../services/YARN/configuration/yarn-site.xml   | 45 ++++++++++++++++
 .../stacks/HDP/2.4/services/stack_advisor.py    |  3 +-
 .../python/stacks/2.0.6/configs/default.json    |  3 +-
 .../2.0.6/hooks/before-ANY/test_before_any.py   | 23 ++++++--
 .../stacks/2.1/STORM/test_storm_ui_server.py    | 29 ++++++++++
 ambari-web/app/controllers/main/service/item.js | 57 +++++++++++++++++++-
 ambari-web/app/utils/ajax/ajax.js               |  5 ++
 .../test/controllers/main/service/item_test.js  | 41 ++++++++++++++
 17 files changed, 280 insertions(+), 31 deletions(-)
----------------------------------------------------------------------



[23/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c543ef8b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c543ef8b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c543ef8b

Branch: refs/heads/trunk
Commit: c543ef8b23f75c828d87e6d0fc4866b567dd5dd7
Parents: 697c309 46bbbf9
Author: Nate Cole <nc...@hortonworks.com>
Authored: Wed Feb 10 11:12:38 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Wed Feb 10 11:12:38 2016 -0500

----------------------------------------------------------------------
 .../stackVersions/StackVersionsCreateCtrl.js    |   7 +-
 .../main/python/ambari_agent/PythonExecutor.py  |   5 +-
 ambari-metrics/ambari-metrics-assembly/pom.xml  |  12 +-
 .../server/controller/AuthToLocalBuilder.java   | 287 ++++++++++-------
 .../server/controller/KerberosHelperImpl.java   |  23 +-
 .../internal/ServiceResourceProvider.java       | 197 ++++++------
 .../ambari/server/state/quicklinks/Link.java    |  19 +-
 .../main/python/ambari_server/serverSetup.py    |  17 -
 ambari-server/src/main/python/bootstrap.py      |   4 +-
 .../0.1.0/package/scripts/metrics_grafana.py    |   2 +
 .../package/scripts/metrics_grafana_util.py     | 131 ++++++++
 .../metrics_grafana_datasource.json.j2          |  33 ++
 .../FALCON/0.5.0.2.1/quicklinks/quicklinks.json |   1 -
 .../1.10.3-10/configuration/kerberos-env.xml    |  14 +
 .../OOZIE/4.0.0.2.0/package/scripts/oozie.py    |  71 ++++-
 .../4.0.0.2.0/package/scripts/oozie_server.py   |   3 +-
 .../package/scripts/oozie_server_upgrade.py     |  18 +-
 .../PXF/3.0.0/package/scripts/params.py         |  17 +-
 .../PXF/3.0.0/package/scripts/service_check.py  |  81 +++--
 .../RANGER/0.4.0/package/scripts/params.py      |   2 +
 .../0.4.0/package/scripts/ranger_admin.py       |  50 +--
 .../0.4.0/package/scripts/service_check.py      |  12 +-
 .../0.4.0/package/scripts/status_params.py      |  27 ++
 .../STORM/0.9.1.2.1/quicklinks/quicklinks.json  |   1 -
 .../main/resources/scripts/Ambaripreupload.py   |  31 +-
 .../HDP/2.0.6/services/HBASE/metainfo.xml       |   7 +
 .../services/HBASE/quicklinks/quicklinks.json   |  97 ++++++
 .../stacks/HDP/2.0.6/services/HDFS/metainfo.xml |   7 +
 .../services/HDFS/quicklinks/quicklinks.json    |  76 +++++
 .../HDP/2.0.6/services/OOZIE/metainfo.xml       |   5 +-
 .../services/OOZIE/quicklinks/quicklinks.json   |   1 -
 .../stacks/HDP/2.0.6/services/YARN/metainfo.xml |  13 +
 .../YARN/quicklinks-mapred/quicklinks.json      |  76 +++++
 .../services/YARN/quicklinks/quicklinks.json    |  76 +++++
 .../main/resources/stacks/HDP/2.1/metainfo.xml  |   2 +-
 .../services/RANGER/quicklinks/quicklinks.json  |   1 -
 .../services/RANGER/themes/theme_version_1.json |  20 +-
 .../services/SPARK/quicklinks/quicklinks.json   |   1 -
 .../ACCUMULO/quicklinks/quicklinks.json         |   1 -
 .../services/ATLAS/quicklinks/quicklinks.json   |   1 -
 .../services/HBASE/quicklinks/quicklinks.json   |   6 -
 .../services/HDFS/quicklinks/quicklinks.json    |   4 -
 .../services/OOZIE/quicklinks/quicklinks.json   |   1 -
 .../services/RANGER/quicklinks/quicklinks.json  |   1 -
 .../services/RANGER/themes/theme_version_2.json |  40 ++-
 .../services/SPARK/quicklinks/quicklinks.json   |   1 -
 .../YARN/quicklinks-mapred/quicklinks.json      |   4 -
 .../services/YARN/quicklinks/quicklinks.json    |   4 -
 .../controller/AuthToLocalBuilderTest.java      | 315 ++++++++++++-------
 .../AMBARI_METRICS/test_metrics_grafana.py      |  19 +-
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     | 172 +++++-----
 .../resources/child_quicklinks_to_merge.json    |   3 -
 .../resources/child_quicklinks_to_override.json |   4 -
 .../src/test/resources/parent_quicklinks.json   |   4 -
 .../assets/data/configurations/quicklinks.json  |   4 -
 .../app/controllers/global/update_controller.js |   8 +-
 .../manage_alert_notifications_controller.js    |  74 +++--
 .../app/controllers/wizard/step4_controller.js  | 122 +++++--
 ambari-web/app/data/HDP2/site_properties.js     |   7 +
 ambari-web/app/router.js                        |  59 ++--
 .../app/views/common/quick_view_link_view.js    |  10 +-
 ambari-web/app/views/main/dashboard/widgets.js  |   5 +
 ...anage_alert_notifications_controller_test.js | 121 ++++---
 .../test/controllers/wizard/step4_test.js       |  40 ++-
 .../views/common/log_file_search_view_test.js   |   3 +-
 .../test/views/main/dashboard/widgets_test.js   |  42 ++-
 66 files changed, 1730 insertions(+), 792 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c543ef8b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
----------------------------------------------------------------------
diff --cc ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
index df344e6,532e5f4..931b7ec
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
@@@ -27,167 -29,38 +27,166 @@@ angular.module('ambariAdminConsole'
    $scope.clusterName = $routeParams.clusterName;
    $scope.subversionPattern = /^\d+\.\d+(-\d+)?$/;
    $scope.upgradeStack = {
 -    selected: null,
 -    options: []
 -  };
 -  $scope.fetchStackVersionFilterList = function () {
 -    return Stack.allStackVersions()
 -    .then(function (allStackVersions) {
 -      var versions = [];
 -      angular.forEach(allStackVersions, function (version) {
 -        if (version.upgrade_packs.length > 0 && version.active) {
 -          versions.push(version);
 -        }
 +    stack_name: '',
 +    stack_version: '',
 +    display_name: ''
 +  };
 +
 +  $scope.option1 = {
 +    index: 1,
 +    displayName: 'Upload Version Definition File',
 +    url: 'files://',
 +    hasError: false
 +  };
 +  $scope.option2 = {
 +    index: 2,
 +    displayName: 'Version Definition File URL',
 +    url: 'https://',
 +    hasError: false
 +  };
 +  $scope.selectedOption = 1;
 +
 +  /**
 +   * User can select ONLY one option to upload version definition file
 +   */
 +  $scope.toggleOptionSelect = function () {
 +    $scope.option1.hasError = false;
 +    $scope.option2.hasError = false;
 +  };
 +  $scope.clearOptionsError = function () {
 +    $scope.option1.hasError = false;
 +    $scope.option2.hasError = false;
 +  };
 +  $scope.readInfoButtonDisabled = function () {
 +    return $scope.option1.selected ? !$scope.option1.url : !$scope.option2.url;
 +  };
 +
 +  $scope.onFileSelect = function(){
 +    return {
 +      link: function($scope,el){
 +        el.bind("change", function(e){
 +          $scope.file = (e.srcElement || e.target).files[0];
 +          $scope.getFile();
 +        })
 +      }
 +    }
 +  };
 +
 +//  $scope.uploadFile = function(){
 +//    var file = $scope.myFile;
 +//    console.log('file is ' );
 +//    console.dir(file);
 +//    var uploadUrl = "/fileUpload";
 +//    fileUpload.uploadFileToUrl(file, uploadUrl);
 +//  };
 +
 +  /**
 +   * Load selected file to current page content
 +   */
 +  $scope.readVersionInfo = function(){
 +    if ($scope.option2.selected) {
 +      var url = $scope.option2.url;
 +    }
 +    /// POST url first then get the version definition info
 +    return Stack.getLatestRepo('HDP').then(function (response) {
 +      $scope.id = response.id;
 +      $scope.isPatch = response.type == 'PATCH';
 +      $scope.stackNameVersion = response.stackNameVersion || 'n/a';
 +      $scope.displayName = response.displayName || 'n/a';
 +      $scope.version = response.version || 'n/a';
 +      $scope.actualVersion = response.actualVersion || 'n/a';
 +      $scope.upgradeStack = {
 +        stack_name: response.stackName,
 +        stack_version: response.stackVersion,
 +        display_name: response.displayName
 +      };
 +      $scope.services = response.services || [];
 +      //save default values of repos to check if they were changed
 +      $scope.defaulfOSRepos = {};
 +      response.updateObj.operating_systems.forEach(function(os) {
 +        $scope.defaulfOSRepos[os.OperatingSystems.os_type] = {
 +          defaultBaseUrl: os.repositories[0].Repositories.base_url,
 +          defaultUtilsUrl: os.repositories[1].Repositories.base_url
 +        };
        });
 -      $scope.upgradeStack.options = versions;
 -      $scope.upgradeStack.selected = versions[versions.length - 1];
 -      $scope.afterStackVersionChange().then(function(){
 -        $scope.disableUnusedOS();
 +      $scope.repoVersionFullName = response.repoVersionFullName;
 +      angular.forEach(response.osList, function (os) {
 +        os.selected = true;
        });
 -    })
 -    .catch(function (data) {
 -      Alert.error($t('versions.alerts.filterListError'), data.message);
 +      $scope.osList = response.osList;
 +      // load supported os type based on stack version
 +      $scope.afterStackVersionRead();
      });
    };
 -  $scope.fetchStackVersionFilterList();
 +
 +  /**
 +   * Load supported OS list
 +   */
 +  $scope.afterStackVersionRead = function () {
 +    Stack.getSupportedOSList($scope.upgradeStack.stack_name, $scope.upgradeStack.stack_version)
 +      .then(function (data) {
 +        var operatingSystems = data.operating_systems;
 +        operatingSystems.map(function (os) {
 +          var existingOSHash = {};
 +          angular.forEach($scope.osList, function (os) {
 +            existingOSHash[os.OperatingSystems.os_type] = os;
 +          });
 +          // if os not in the list, mark as un-selected, add this to the osList
 +          if (!existingOSHash[os.OperatingSystems.os_type]) {
 +            os.selected = false;
 +            os.repositories.forEach(function(repo) {
 +              repo.Repositories.base_url = '';
 +            });
 +            $scope.osList.push(os);
 +          }
 +        });
 +      })
 +      .catch(function (data) {
 +        Alert.error($t('versions.alerts.osListError'), data.message);
 +      });
 +  };
 +
 +  /**
 +   * On click handler for removing OS
 +   */
 +  $scope.removeOS = function() {
 +    this.os.selected = false;
 +    if (this.os.repositories) {
 +      this.os.repositories.forEach(function(repo) {
 +        repo.hasError = false;
 +      });
 +    }
 +  };
 +  /**
 +   * On click handler for adding new OS
 +   */
 +  $scope.addOS = function() {
 +    this.os.selected = true;
 +    if (this.os.repositories) {
 +      this.os.repositories.forEach(function(repo) {
 +        repo.hasError = false;
 +      });
 +    }
 +  };
 +
 +  $scope.isSaveButtonDisabled = function() {
 +    var enabled = false;
 +    $scope.osList.forEach(function(os) {
 +      if (os.selected) {
 +        enabled = true
 +      }
 +    });
 +    return !enabled;
 +  }
  
    $scope.save = function () {
 -    return Stack.validateBaseUrls($scope.skipValidation, $scope.osList, $scope.upgradeStack.selected).then(function (invalidUrls) {
 +    return Stack.validateBaseUrls($scope.skipValidation, $scope.osList, $scope.upgradeStack).then(function (invalidUrls) {
        if (invalidUrls.length === 0) {
 -        Stack.addRepo($scope.upgradeStack.selected, $scope.repoSubversion, $scope.osList)
 +        Stack.addRepo($scope.upgradeStack, $scope.actualVersion, $scope.osList)
            .success(function () {
-             Alert.success($t('versions.alerts.versionCreated', {
-               stackName: $scope.upgradeStack.stack_name,
-               versionName: $scope.actualVersion
-             }));
+             var versionName = $scope.upgradeStack.selected.stack_version + '.' + $scope.repoSubversion;
+             var stackName = $scope.upgradeStack.selected.stack_name;
+             Alert.success($t('versions.alerts.versionCreated', {stackName: stackName, versionName: versionName}));
              $location.path('/stackVersions');
            })
            .error(function (data) {


[40/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2eea1bfa
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2eea1bfa
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2eea1bfa

Branch: refs/heads/trunk
Commit: 2eea1bfa9008b0e17dcca8c196ea911fdce041fd
Parents: 4d3839c 4fcca62
Author: Nate Cole <nc...@hortonworks.com>
Authored: Mon Feb 22 11:48:00 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Mon Feb 22 11:48:00 2016 -0500

----------------------------------------------------------------------
 .../python/resource_management/core/logger.py   |   2 +-
 .../timeline/AbstractTimelineMetricsSink.java   |  53 +++++-
 .../ApplicationHistoryServer.java               |   6 +-
 .../loadsimulator/net/RestMetricsSender.java    |   4 +-
 .../ApplicationHistoryStoreTestUtils.java       |   2 +-
 .../TestApplicationHistoryClientService.java    |   6 +-
 .../TestFileSystemApplicationHistoryStore.java  |  12 +-
 .../TestMemoryApplicationHistoryStore.java      |  12 +-
 .../webapp/TestAHSWebServices.java              |   2 +-
 .../server/checks/CheckDatabaseHelper.java      | 137 ++++++++-----
 .../server/configuration/Configuration.java     |  89 +++++++++
 .../server/controller/ControllerModule.java     |  16 ++
 .../controller/ServiceComponentRequest.java     |  35 +++-
 .../controller/ServiceComponentResponse.java    |  22 ++-
 .../internal/ComponentResourceProvider.java     |  38 ++++
 .../server/orm/dao/HostRoleCommandDAO.java      | 166 +++++++++++++---
 .../orm/entities/HostRoleCommandEntity.java     |   7 +-
 .../ServiceComponentDesiredStateEntity.java     |  11 ++
 .../serveraction/ServerActionExecutor.java      |  13 +-
 .../ambari/server/state/ServiceComponent.java   |  14 ++
 .../server/state/ServiceComponentImpl.java      |  81 +++++++-
 .../server/state/cluster/ClusterImpl.java       |  36 ++--
 .../services/AlertNoticeDispatchService.java    |  17 +-
 .../server/upgrade/UpgradeCatalog222.java       |  55 ++++++
 .../server/upgrade/UpgradeCatalog240.java       |  14 ++
 .../main/resources/Ambari-DDL-Derby-CREATE.sql  |   1 +
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   2 +
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |   2 +
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |   4 +
 .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql     |   2 +
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |   2 +
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   |   2 +-
 .../src/main/resources/alert-templates.xml      |  20 +-
 .../1.6.1.2.2.0/configuration/accumulo-env.xml  |   1 -
 .../0.5.0.2.1/configuration/falcon-env.xml      |   1 -
 .../FLUME/1.4.0.2.0/configuration/flume-env.xml |   1 -
 .../0.96.0.2.0/configuration/hbase-env.xml      |   1 -
 .../HDFS/2.1.0.2.0/configuration/hadoop-env.xml |   1 -
 .../HIVE/0.12.0.2.0/configuration/hive-env.xml  |   1 -
 .../HIVE/0.12.0.2.0/configuration/hive-site.xml |   1 +
 .../0.8.1.2.2/configuration/kafka-broker.xml    |   1 -
 .../OOZIE/4.0.0.2.0/configuration/oozie-env.xml |   1 -
 .../configuration-mapred/mapred-env.xml         |   1 -
 .../YARN/2.1.0.2.0/configuration/yarn-env.xml   |   1 -
 .../3.4.5.2.0/configuration/zookeeper-env.xml   |   1 -
 .../src/main/resources/properties.json          |   1 +
 .../main/resources/scripts/Ambaripreupload.py   |  46 +++--
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |   5 +-
 .../services/HBASE/configuration/hbase-env.xml  |   1 -
 .../stacks/HDP/2.2/services/stack_advisor.py    |  17 +-
 .../services/HIVE/configuration/hive-site.xml   |   2 +
 .../stacks/HDP/2.3/services/stack_advisor.py    |  32 ++--
 .../stacks/HDP/2.4/services/HIVE/metainfo.xml   |  41 +++-
 .../actionmanager/TestActionScheduler.java      |  24 +--
 .../ambari/server/agent/AgentResourceTest.java  |   2 +
 .../server/checks/CheckDatabaseHelperTest.java  |  20 +-
 .../server/configuration/ConfigurationTest.java |  95 ++++++++++
 .../AmbariManagementControllerTest.java         |   3 +
 .../server/controller/KerberosHelperTest.java   |   2 +
 .../internal/ComponentResourceProviderTest.java |  37 ++--
 .../ambari/server/stack/StackManagerTest.java   |  35 ++--
 .../ambari/server/state/ConfigHelperTest.java   |   2 +
 .../server/upgrade/UpgradeCatalog222Test.java   | 134 ++++++++++++-
 .../server/upgrade/UpgradeCatalog240Test.java   |  12 ++
 .../ambari/server/utils/StageUtilsTest.java     |   2 +
 .../stacks/2.0.6/common/test_stack_advisor.py   |  12 +-
 .../stacks/2.2/common/test_stack_advisor.py     |  14 +-
 .../stacks/2.3/common/test_stack_advisor.py     |  46 +++++
 ambari-web/app/assets/test/tests.js             |   1 +
 .../hawq/addStandby/step3_controller.js         |   2 +-
 .../app/mappers/components_state_mapper.js      |   5 +
 ambari-web/app/messages.js                      |   5 +
 .../app/models/alerts/alert_definition.js       |   4 +-
 ambari-web/app/views.js                         |   1 +
 .../configs/widgets/list_config_widget_view.js  |  11 +-
 ambari-web/app/views/main/dashboard/widgets.js  |  19 +-
 .../main/dashboard/widgets/hawqsegment_live.js  | 190 +++++++++++++++++++
 ambari-web/test/controllers/installer_test.js   |   2 +-
 .../progress_popup_controller_test.js           |   4 +-
 ...anage_alert_notifications_controller_test.js |   8 +-
 .../main/service/info/config_test.js            |   2 +-
 .../widgets/create/step2_controller_test.js     |   2 +-
 .../test/controllers/main/service_test.js       |  12 --
 .../test/controllers/wizard/step3_test.js       |  18 --
 .../test/controllers/wizard/step7_test.js       |   2 +-
 .../test/controllers/wizard/step9_test.js       |   7 +-
 .../test/mappers/server_data_mapper_test.js     |   8 +-
 .../mixins/common/configs/configs_saver_test.js |  10 +-
 .../host_components/install_component_test.js   |   2 +-
 .../test/models/alerts/alert_instance_test.js   |   3 +-
 ambari-web/test/utils/form_field_test.js        |   2 +-
 .../widgets/slider_config_widget_view_test.js   |   4 +-
 ambari-web/test/views/common/table_view_test.js |   2 +-
 .../admin/stack_upgrade/services_view_test.js   |   6 +-
 .../views/main/alert_definitions_view_test.js   |   4 +-
 .../test/views/main/dashboard/widget_test.js    |  18 +-
 .../dashboard/widgets/hawqsegment_live_test.js  |  69 +++++++
 .../ambari_metrics/regionserver_base_test.js    |   2 +-
 ambari-web/test/views/wizard/step5_view_test.js |   4 +-
 ambari-web/test/views/wizard/step9_view_test.js |  31 ++-
 100 files changed, 1564 insertions(+), 378 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2eea1bfa/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/2eea1bfa/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
index 65cc107,630eef2..519e4e6
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
@@@ -151,35 -115,14 +154,43 @@@ public class ServiceComponentDesiredSta
      this.desiredStack = desiredStack;
    }
  
 +  /**
 +   * Adds a historical entry for the version of this service component. New
 +   * entries are automatically created when this entity is merged via a
 +   * {@link CascadeType#MERGE}.
 +   *
 +   * @param historicalEntry
 +   *          the entry to add.
 +   */
 +  public void addHistory(ServiceComponentHistoryEntity historicalEntry) {
 +    if (null == serviceComponentHistory) {
 +      serviceComponentHistory = new ArrayList<>();
 +    }
 +
 +    serviceComponentHistory.add(historicalEntry);
 +
 +    if (!equals(historicalEntry.getServiceComponentDesiredState())) {
 +      historicalEntry.setServiceComponentDesiredState(this);
 +    }
 +  }
 +
 +  /**
 +   * Gets the history of this component's upgrades and downgrades.
 +   *
 +   * @return the component history, or {@code null} if none.
 +   */
 +  public Collection<ServiceComponentHistoryEntity> getHistory() {
 +    return serviceComponentHistory;
 +  }
 +
+   public boolean isRecoveryEnabled() {
+     return recoveryEnabled != 0;
+   }
+ 
+   public void setRecoveryEnabled(boolean recoveryEnabled) {
+     this.recoveryEnabled = (recoveryEnabled == false) ? 0 : 1;
+   }
+ 
    @Override
    public boolean equals(Object o) {
      if (this == o) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/2eea1bfa/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
index 4afc857,7b866a9..eca911d
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
@@@ -95,7 -95,8 +95,7 @@@ public class ServiceComponentImpl imple
      desiredStateEntity.setDesiredState(State.INIT);
      desiredStateEntity.setServiceName(service.getName());
      desiredStateEntity.setClusterId(service.getClusterId());
- 
+     desiredStateEntity.setRecoveryEnabled(false);
 -    desiredStateEntityPK = getDesiredStateEntityPK(desiredStateEntity);
      setDesiredStackVersion(service.getDesiredStackVersion());
  
      hostComponents = new HashMap<String, ServiceComponentHost>();
@@@ -178,9 -180,66 +178,58 @@@
  
    @Override
    public String getName() {
 -    ServiceComponentDesiredStateEntity desiredStateEntity = getDesiredStateEntity();
 -    if (desiredStateEntity != null) {
 -      return desiredStateEntity.getComponentName();
 -    } else {
 -      LOG.warn("Trying to fetch a member from an entity object that may " +
 -        "have been previously deleted, serviceName = " + getServiceName() + ", " +
 -        "componentName = " + componentName);
 -    }
 -    return null;
 +    return componentName;
    }
  
+   /**
+    * Get the recoveryEnabled value.
+    *
+    * @return true or false
+    */
+   @Override
+   public boolean isRecoveryEnabled() {
+     ServiceComponentDesiredStateEntity desiredStateEntity = getDesiredStateEntity();
+     if (desiredStateEntity != null) {
+       return desiredStateEntity.isRecoveryEnabled();
+     } else {
+       LOG.warn("Trying to fetch a member from an entity object that may " +
+               "have been previously deleted, serviceName = " + service.getName() + ", " +
+               "componentName = " + componentName);
+     }
+     return false;
+   }
+ 
+   /**
+    * Set the recoveryEnabled field in the entity object.
+    *
+    * @param recoveryEnabled - true or false
+    */
+   @Override
+   public void setRecoveryEnabled(boolean recoveryEnabled) {
+     readWriteLock.writeLock().lock();
+     try {
+       if (LOG.isDebugEnabled()) {
+         LOG.debug("Setting RecoveryEnabled of Component" + ", clusterName="
+                 + service.getCluster().getClusterName() + ", clusterId="
+                 + service.getCluster().getClusterId() + ", serviceName="
+                 + service.getName() + ", componentName=" + getName()
+                 + ", oldRecoveryEnabled=" + isRecoveryEnabled() + ", newRecoveryEnabled="
+                 + recoveryEnabled);
+       }
+       ServiceComponentDesiredStateEntity desiredStateEntity = getDesiredStateEntity();
+       if (desiredStateEntity != null) {
+         desiredStateEntity.setRecoveryEnabled(recoveryEnabled);
 -        saveIfPersisted();
++        saveIfPersisted(desiredStateEntity);
+       } else {
+         LOG.warn("Setting a member on an entity object that may have been " +
+                 "previously deleted, serviceName = " + service.getName());
+       }
+ 
+     } finally {
+       readWriteLock.writeLock().unlock();
+     }
+   }
+ 
    @Override
    public String getServiceName() {
      return service.getName();

http://git-wip-us.apache.org/repos/asf/ambari/blob/2eea1bfa/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/2eea1bfa/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
index 09f31e4,6a8ead1..f2055a6
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
@@@ -127,10 -117,8 +129,11 @@@ public class UpgradeCatalog240 extends 
    @Override
    protected void executeDDLUpdates() throws AmbariException, SQLException {
      updateAdminPermissionTable();
+     updateServiceComponentDesiredStateTable();
      createSettingTable();
 +    updateRepoVersionTableDDL();
 +    updateServiceComponentDesiredStateTableDDL();
 +    createServiceComponentHistoryTable();
    }
  
    @Override
@@@ -408,158 -395,13 +411,169 @@@
    }
  
    /**
 +   * Makes the following changes to the {@value #REPO_VERSION_TABLE} table:
 +   * <ul>
 +   * <li>repo_type VARCHAR(255) DEFAULT 'STANDARD' NOT NULL</li>
 +   * <li>version_url VARCHAR(1024)</li>
 +   * <li>version_xml MEDIUMTEXT</li>
 +   * <li>version_xsd VARCHAR(512)</li>
 +   * <li>parent_id BIGINT</li>
 +   * </ul>
 +   *
 +   * @throws SQLException
 +   */
 +  private void updateRepoVersionTableDDL() throws SQLException {
 +    DBColumnInfo repoTypeColumn = new DBColumnInfo("repo_type", String.class, 255, RepositoryType.STANDARD.name(), false);
 +    DBColumnInfo versionUrlColumn = new DBColumnInfo("version_url", String.class, 1024, null, true);
 +    DBColumnInfo versionXmlColumn = new DBColumnInfo("version_xml", Clob.class, null, null, true);
 +    DBColumnInfo versionXsdColumn = new DBColumnInfo("version_xsd", String.class, 512, null, true);
 +    DBColumnInfo parentIdColumn = new DBColumnInfo("parent_id", Long.class, null, null, true);
 +
 +    dbAccessor.addColumn(REPO_VERSION_TABLE, repoTypeColumn);
 +    dbAccessor.addColumn(REPO_VERSION_TABLE, versionUrlColumn);
 +    dbAccessor.addColumn(REPO_VERSION_TABLE, versionXmlColumn);
 +    dbAccessor.addColumn(REPO_VERSION_TABLE, versionXsdColumn);
 +    dbAccessor.addColumn(REPO_VERSION_TABLE, parentIdColumn);
 +  }
 +
 +  /**
 +   * Makes the following changes to the {@value #SERVICE_COMPONENT_DS_TABLE} table,
 +   * but only if the table doesn't have its new PK set.
 +   * <ul>
 +   * <li>id BIGINT NOT NULL</li>
 +   * <li>Drops FKs on {@value #HOST_COMPONENT_DS_TABLE} and {@value #HOST_COMPONENT_STATE_TABLE}</li>
 +   * <li>Populates the new {@code id} column in {@value #SERVICE_COMPONENT_DS_TABLE}</li>
 +   * <li>Creates {@code UNIQUE} constraint on {@value #HOST_COMPONENT_DS_TABLE}</li>
 +   * <li>Adds FKs on {@value #HOST_COMPONENT_DS_TABLE} and {@value #HOST_COMPONENT_STATE_TABLE}</li>
 +   * <li>Adds new sequence value of {@code servicecomponentdesiredstate_id_seq}</li>
 +   * </ul>
 +   *
 +   * @throws SQLException
 +   */
 +  @Transactional
 +  private void updateServiceComponentDesiredStateTableDDL() throws SQLException {
 +    if (dbAccessor.tableHasPrimaryKey(SERVICE_COMPONENT_DS_TABLE, ID)) {
 +      LOG.info("Skipping {} table Primary Key modifications since the new {} column already exists",
 +          SERVICE_COMPONENT_DS_TABLE, ID);
 +
 +      return;
 +    }
 +
 +    // drop FKs to SCDS in both HCDS and HCS tables
 +    dbAccessor.dropFKConstraint(HOST_COMPONENT_DS_TABLE, "hstcmpnntdesiredstatecmpnntnme");
 +    dbAccessor.dropFKConstraint(HOST_COMPONENT_STATE_TABLE, "hstcomponentstatecomponentname");
 +
 +    // remove existing compound PK
 +    dbAccessor.dropPKConstraint(SERVICE_COMPONENT_DS_TABLE, "servicecomponentdesiredstate_pkey");
 +
 +    // add new PK column to SCDS, making it nullable for now
 +    DBColumnInfo idColumn = new DBColumnInfo(ID, Long.class, null, null, true);
 +    dbAccessor.addColumn(SERVICE_COMPONENT_DS_TABLE, idColumn);
 +
 +    // populate SCDS id column
 +    AtomicLong scdsIdCounter = new AtomicLong(1);
 +    Statement statement = null;
 +    ResultSet resultSet = null;
 +    try {
 +      statement = dbAccessor.getConnection().createStatement();
 +      if (statement != null) {
 +        String selectSQL = String.format("SELECT cluster_id, service_name, component_name FROM %s",
 +            SERVICE_COMPONENT_DS_TABLE);
 +
 +        resultSet = statement.executeQuery(selectSQL);
 +        while (null != resultSet && resultSet.next()) {
 +          final Long clusterId = resultSet.getLong("cluster_id");
 +          final String serviceName = resultSet.getString("service_name");
 +          final String componentName = resultSet.getString("component_name");
 +
 +          String updateSQL = String.format(
 +              "UPDATE %s SET %s = %d WHERE cluster_id = %d AND service_name = '%s' AND component_name = '%s'",
 +              SERVICE_COMPONENT_DS_TABLE, ID, scdsIdCounter.getAndIncrement(), clusterId,
 +              serviceName, componentName);
 +
 +          dbAccessor.executeQuery(updateSQL);
 +        }
 +      }
 +    } finally {
 +      JdbcUtils.closeResultSet(resultSet);
 +      JdbcUtils.closeStatement(statement);
 +    }
 +
 +    // make the column NON NULL now
 +    dbAccessor.alterColumn(SERVICE_COMPONENT_DS_TABLE,
 +        new DBColumnInfo(ID, Long.class, null, null, false));
 +
 +    // create a new PK, matching the name of the constraint found in SQL
 +    dbAccessor.addPKConstraint(SERVICE_COMPONENT_DS_TABLE, "pk_sc_desiredstate", ID);
 +
 +    // create UNIQUE constraint, ensuring column order matches SQL files
 +    String[] uniqueColumns = new String[] { "component_name", "service_name", "cluster_id" };
 +    dbAccessor.addUniqueConstraint(SERVICE_COMPONENT_DS_TABLE, "unq_scdesiredstate_name",
 +        uniqueColumns);
 +
 +    // add FKs back to SCDS in both HCDS and HCS tables
 +    dbAccessor.addFKConstraint(HOST_COMPONENT_DS_TABLE, "hstcmpnntdesiredstatecmpnntnme",
 +        uniqueColumns, SERVICE_COMPONENT_DS_TABLE, uniqueColumns, false);
 +
 +    dbAccessor.addFKConstraint(HOST_COMPONENT_STATE_TABLE, "hstcomponentstatecomponentname",
 +        uniqueColumns, SERVICE_COMPONENT_DS_TABLE, uniqueColumns, false);
 +
 +    // Add sequence for SCDS id
 +    addSequence("servicecomponentdesiredstate_id_seq", scdsIdCounter.get(), false);
 +  }
 +
 +  /**
 +   * Makes the following changes to the {@value #SERVICE_COMPONENT_HISTORY_TABLE} table:
 +   * <ul>
 +   * <li>id BIGINT NOT NULL</li>
 +   * <li>component_id BIGINT NOT NULL</li>
 +   * <li>upgrade_id BIGINT NOT NULL</li>
 +   * <li>from_stack_id BIGINT NOT NULL</li>
 +   * <li>to_stack_id BIGINT NOT NULL</li>
 +   * <li>CONSTRAINT PK_sc_history PRIMARY KEY (id)</li>
 +   * <li>CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id)</li>
 +   * <li>CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id)</li>
 +   * <li>CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id)</li>
 +   * <li>CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)</li>
 +   * <li>Creates the {@code servicecomponent_history_id_seq}</li>
 +   * </ul>
 +   *
 +   * @throws SQLException
 +   */
 +  private void createServiceComponentHistoryTable() throws SQLException {
 +    List<DBColumnInfo> columns = new ArrayList<>();
 +    columns.add(new DBColumnInfo(ID, Long.class, null, null, false));
 +    columns.add(new DBColumnInfo("component_id", Long.class, null, null, false));
 +    columns.add(new DBColumnInfo("upgrade_id", Long.class, null, null, false));
 +    columns.add(new DBColumnInfo("from_stack_id", Long.class, null, null, false));
 +    columns.add(new DBColumnInfo("to_stack_id", Long.class, null, null, false));
 +    dbAccessor.createTable(SERVICE_COMPONENT_HISTORY_TABLE, columns, (String[]) null);
 +
 +    dbAccessor.addPKConstraint(SERVICE_COMPONENT_HISTORY_TABLE, "PK_sc_history", ID);
 +
 +    dbAccessor.addFKConstraint(SERVICE_COMPONENT_HISTORY_TABLE, "FK_sc_history_component_id",
 +        "component_id", SERVICE_COMPONENT_DS_TABLE, "id", false);
 +
 +    dbAccessor.addFKConstraint(SERVICE_COMPONENT_HISTORY_TABLE, "FK_sc_history_upgrade_id",
 +        "upgrade_id", UPGRADE_TABLE, "upgrade_id", false);
 +
 +    dbAccessor.addFKConstraint(SERVICE_COMPONENT_HISTORY_TABLE, "FK_sc_history_from_stack_id",
 +        "from_stack_id", STACK_TABLE, "stack_id", false);
 +
 +    dbAccessor.addFKConstraint(SERVICE_COMPONENT_HISTORY_TABLE, "FK_sc_history_to_stack_id",
 +        "to_stack_id", STACK_TABLE, "stack_id", false);
 +
 +    addSequence("servicecomponent_history_id_seq", 0L, false);
 +  }
++
++  /**
+    * Alter servicecomponentdesiredstate table to add recovery_enabled column.
+    * @throws SQLException
+    */
+   private void updateServiceComponentDesiredStateTable() throws SQLException {
+     // ALTER TABLE servicecomponentdesiredstate ADD COLUMN
+     // recovery_enabled SMALLINT DEFAULT 0 NOT NULL
+     dbAccessor.addColumn(SERVICE_COMPONENT_DESIRED_STATE_TABLE,
 -            new DBAccessor.DBColumnInfo(RECOVERY_ENABLED_COL, Short.class, null, 0, false));
++            new DBColumnInfo(RECOVERY_ENABLED_COL, Short.class, null, 0, false));
+   }
  }

http://git-wip-us.apache.org/repos/asf/ambari/blob/2eea1bfa/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
index 2db745b,6d63a90..bd7755c
--- a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
@@@ -177,8 -176,8 +177,9 @@@ CREATE TABLE servicecomponentdesiredsta
    desired_stack_id BIGINT NOT NULL,
    desired_state VARCHAR(255) NOT NULL,
    service_name VARCHAR(255) NOT NULL,
+   recovery_enabled SMALLINT NOT NULL DEFAULT 0,
 -  PRIMARY KEY (component_name, cluster_id, service_name)
 +  CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
 +  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
  );
  
  CREATE TABLE servicedesiredstate (

http://git-wip-us.apache.org/repos/asf/ambari/blob/2eea1bfa/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index b892bc8,2898ab7..ac1c5d7
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@@ -178,8 -177,8 +178,9 @@@ CREATE TABLE servicecomponentdesiredsta
    desired_stack_id BIGINT NOT NULL,
    desired_state VARCHAR(255) NOT NULL,
    service_name VARCHAR(100) NOT NULL,
+   recovery_enabled SMALLINT NOT NULL DEFAULT 0,
 -  PRIMARY KEY (component_name, cluster_id, service_name)
 +  CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
 +  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
  );
  
  CREATE TABLE servicedesiredstate (

http://git-wip-us.apache.org/repos/asf/ambari/blob/2eea1bfa/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 026efea,092f8c2..4ed3a19
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@@ -168,8 -167,8 +168,9 @@@ CREATE TABLE servicecomponentdesiredsta
    desired_stack_id NUMBER(19) NOT NULL,
    desired_state VARCHAR2(255) NOT NULL,
    service_name VARCHAR2(255) NOT NULL,
+   recovery_enabled SMALLINT DEFAULT 0 NOT NULL,
 -  PRIMARY KEY (component_name, cluster_id, service_name)
 +  CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
 +  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
  );
  
  CREATE TABLE servicedesiredstate (

http://git-wip-us.apache.org/repos/asf/ambari/blob/2eea1bfa/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index fb9889d,150ea9b..5d7be25
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@@ -177,8 -176,8 +177,9 @@@ CREATE TABLE servicecomponentdesiredsta
    desired_stack_id BIGINT NOT NULL,
    desired_state VARCHAR(255) NOT NULL,
    service_name VARCHAR(255) NOT NULL,
+   recovery_enabled SMALLINT NOT NULL DEFAULT 0,
 -  PRIMARY KEY (component_name, cluster_id, service_name)
 +  CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
 +  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
  );
  
  CREATE TABLE servicedesiredstate (

http://git-wip-us.apache.org/repos/asf/ambari/blob/2eea1bfa/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
index 3cc7516,0443336..c032b8f
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
@@@ -202,8 -201,8 +202,9 @@@ CREATE TABLE ambari.servicecomponentdes
    desired_stack_id BIGINT NOT NULL,
    desired_state VARCHAR(255) NOT NULL,
    service_name VARCHAR(255) NOT NULL,
+   recovery_enabled SMALLINT NOT NULL DEFAULT 0,
 -  PRIMARY KEY (component_name, cluster_id, service_name)
 +  CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
 +  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
  );
  GRANT ALL PRIVILEGES ON TABLE ambari.servicecomponentdesiredstate TO :username;
  

http://git-wip-us.apache.org/repos/asf/ambari/blob/2eea1bfa/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
index a5bfdc2,20a7634..6a6b77b
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
@@@ -167,8 -166,8 +167,9 @@@ CREATE TABLE servicecomponentdesiredsta
    desired_stack_id NUMERIC(19) NOT NULL,
    desired_state VARCHAR(255) NOT NULL,
    service_name VARCHAR(255) NOT NULL,
+   recovery_enabled SMALLINT NOT NULL DEFAULT 0,
 -  PRIMARY KEY (component_name, cluster_id, service_name)
 +  CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
 +  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
  );
  
  CREATE TABLE servicedesiredstate (

http://git-wip-us.apache.org/repos/asf/ambari/blob/2eea1bfa/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
index 8e5b2f8,2c9adf3..43419c1
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
@@@ -187,10 -186,9 +187,9 @@@ CREATE TABLE servicecomponentdesiredsta
    desired_stack_id BIGINT NOT NULL,
    desired_state VARCHAR(255) NOT NULL,
    service_name VARCHAR(255) NOT NULL,
-   PRIMARY KEY CLUSTERED (id),
 -  recovery_enabled SMALLINT NOT NULL DEFAULT 0,
 -  PRIMARY KEY CLUSTERED (component_name, cluster_id, service_name)
 -  );
 +  CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
 +  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
 +);
  
  CREATE TABLE servicedesiredstate (
    cluster_id BIGINT NOT NULL,

http://git-wip-us.apache.org/repos/asf/ambari/blob/2eea1bfa/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/2eea1bfa/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/2eea1bfa/ambari-web/app/messages.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/2eea1bfa/ambari-web/app/views.js
----------------------------------------------------------------------


[19/50] [abbrv] ambari git commit: AMBARI-14946. Create python script for generating version definition file (ncole)

Posted by nc...@apache.org.
AMBARI-14946. Create python script for generating version definition file (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0f2c3375
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0f2c3375
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0f2c3375

Branch: refs/heads/trunk
Commit: 0f2c3375da8b3a5ab5739a5ae79db44bf5f91815
Parents: a256130
Author: Nate Cole <nc...@hortonworks.com>
Authored: Fri Feb 5 14:22:28 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Fri Feb 5 14:22:28 2016 -0500

----------------------------------------------------------------------
 .../src/main/resources/version_definition.xsd   |   7 +-
 contrib/version-builder/example.sh              |  44 +++
 contrib/version-builder/version_builder.py      | 354 +++++++++++++++++++
 3 files changed, 402 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0f2c3375/ambari-server/src/main/resources/version_definition.xsd
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/version_definition.xsd b/ambari-server/src/main/resources/version_definition.xsd
index 2efdd77..77b4203 100644
--- a/ambari-server/src/main/resources/version_definition.xsd
+++ b/ambari-server/src/main/resources/version_definition.xsd
@@ -19,7 +19,7 @@
 <xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">
   <xs:annotation>
     <xs:documentation>
-    This XSD is used to validate a repo definition file.  You can verify the XML is valid
+    This XSD is used to validate a version definition file.  You can verify the XML is valid
     by running (on Linux):
     xmllint --noout --load-trace --schema [path-to-this-file] [path-to-xml]
     </xs:documentation>
@@ -31,9 +31,10 @@
      <xs:element name="stack-id" type="xs:string" />
      <xs:element name="version" type="xs:string" />
      <xs:element name="build" type="xs:string" />
-     <xs:element name="compatible-with" type="xs:string" minOccurs="0" maxOccurs="1" />
+     <xs:element name="compatible-with" type="xs:string" minOccurs="0"/>
      <xs:element name="release-notes" type="xs:string" maxOccurs="1" />
      <xs:element name="display" type="xs:string" minOccurs="0" />
+     <xs:element name="package-version" type="xs:string" minOccurs="0" />
     </xs:all>
   </xs:complexType>
   
@@ -183,4 +184,4 @@
 
   </xs:element>
   
-</xs:schema>
\ No newline at end of file
+</xs:schema>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0f2c3375/contrib/version-builder/example.sh
----------------------------------------------------------------------
diff --git a/contrib/version-builder/example.sh b/contrib/version-builder/example.sh
new file mode 100755
index 0000000..a93ddb6
--- /dev/null
+++ b/contrib/version-builder/example.sh
@@ -0,0 +1,44 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+filename="version_241-12345.xml"
+
+python version_builder.py --file $filename --release-type PATCH
+python version_builder.py --file $filename --release-stack HDP-2.3
+python version_builder.py --file $filename --release-version 2.4.1.1
+python version_builder.py --file $filename --release-build 12345
+python version_builder.py --file $filename --release-notes http://example.com
+python version_builder.py --file $filename --release-display HDP-2.4.1.1-1234-patch
+python version_builder.py --file $filename --release-compatible 2.4.[0-1].0
+
+# call any number of times for each service in the repo
+python version_builder.py --file $filename --manifest --manifest-id HDFS-271 --manifest-service HDFS --manifest-version 2.7.1.2.4
+python version_builder.py --file $filename --manifest --manifest-id HBASE-132 --manifest-service HBASE --manifest-version 1.3.2.4.3
+
+#call any number of times for the target services to upgrade
+python version_builder.py --file $filename --available --manifest-id HDFS-271
+
+#call any number of times for repo per os
+python version_builder.py --file $filename --repo --repo-os redhat6 --repo-id HDP-2.3 --repo-name HDP --repo-url http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.3.4.0
+python version_builder.py --file $filename --repo --repo-os redhat6 --repo-id HDP-UTILS-1.1.0.20 --repo-name HDP-UTILS --repo-url http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6
+
+python version_builder.py --file $filename --finalize --xsd ../../ambari-server/src/main/resources/version_definition.xsd
+
+# to upload this to running Ambari instance on localhost:
+# curl -u admin:admin -H 'Content-Type: text/xml' -X POST -d @$filename http://localhost:8080/api/v1/version_definitions

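For orientation, a minimal sketch (Python; it mirrors only the load_file() skeleton visible in the version_builder.py diff below, and the element names come from that diff and from version_definition.xsd) of the empty document the builder starts from before the --release/--manifest/--repo options populate it:

import xml.etree.ElementTree as ET

# Skeleton as created by load_file() when the target file does not exist yet.
root = ET.Element("repository-version", {
    "xmlns:xsi": "http://www.w3.org/2001/XMLSchema-instance",
    "xsi:noNamespaceSchemaLocation": "version_definition.xsd",
})
for name in ("release", "manifest", "available-services", "repository-info"):
    ET.SubElement(root, name)

ET.dump(root)

Each invocation in example.sh then fills in one of those four children, and --finalize validates the assembled file against version_definition.xsd via xmllint.
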
http://git-wip-us.apache.org/repos/asf/ambari/blob/0f2c3375/contrib/version-builder/version_builder.py
----------------------------------------------------------------------
diff --git a/contrib/version-builder/version_builder.py b/contrib/version-builder/version_builder.py
new file mode 100644
index 0000000..6c20a47
--- /dev/null
+++ b/contrib/version-builder/version_builder.py
@@ -0,0 +1,354 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import optparse
+import os
+import subprocess
+import sys
+import xml.etree.ElementTree as ET
+
+def load_file(filename):
+  """
+  Loads the specified XML file
+  """
+  if os.path.exists(filename):
+    tree = ET.ElementTree()
+    tree.parse(filename)
+    root = tree.getroot()
+  else:
+    attribs = {}
+    attribs['xmlns:xsi'] = "http://www.w3.org/2001/XMLSchema-instance"
+    attribs['xsi:noNamespaceSchemaLocation'] = "version_definition.xsd"
+    root = ET.Element("repository-version", attribs)
+
+    ET.SubElement(root, "release")
+    ET.SubElement(root, "manifest")
+    ET.SubElement(root, "available-services")
+    ET.SubElement(root, "repository-info")
+
+  return root
+
+def save_file(xml, filename):
+  """
+  Saves the XML file
+  """
+  p = subprocess.Popen(['xmllint', '--format', '--output', filename, '-'], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
+  (stdout, stderr) = p.communicate(input=ET.tostring(xml))
+
+def check_xmllint():
+  """
+  Verifies utility xmllint is available
+  """
+  try:
+    p = subprocess.Popen(['xmllint', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False)
+    (stdout, stderr) = p.communicate()
+
+    if p.returncode != 0:
+      raise Exception("xmllint command does not appear to be available")
+
+  except:
+    raise Exception("xmllint command does not appear to be available")
+  
+
+def validate_file(filename, xsdfile):
+  """
+  Validates the XML file against the XSD
+  """
+  args = ['xmllint', '--noout', '--load-trace', '--schema', xsdfile, filename]
+
+  p = subprocess.Popen(args, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
+  (stdout, stderr) = p.communicate()
+
+  if p.returncode != 0:
+    raise Exception(stderr)
+
+  if len(stdout) > 0:
+    print stdout
+
+  if len(stderr) > 0:
+    print stderr
+
+
+def update_simple(parent, name, value):
+  """
+  Helper method to either update or create the element
+  """
+  element = parent.find('./' + name) 
+
+  if element is None:
+    element = ET.SubElement(parent, name)
+    element.text = value
+  else:
+    element.text = value
+
+def process_release(xmlroot, options):
+  """
+  Create elements of the 'release' parent
+  """
+  release_element = xmlroot.find("./release")
+
+  if release_element is None:
+    raise Exception("Element 'release' is not found")
+
+  if options.release_type:
+    update_simple(release_element, "type", options.release_type)
+
+  if options.release_stack:
+    update_simple(release_element, "stack-id", options.release_stack)
+
+  if options.release_version:
+    update_simple(release_element, "version", options.release_version)
+
+  if options.release_build:
+    update_simple(release_element, "build", options.release_build)
+
+  if options.release_compatible:
+    update_simple(release_element, "compatible-with", options.release_compatible)
+
+  if options.release_notes:
+    update_simple(release_element, "release-notes", options.release_notes)
+
+  if options.release_display:
+    update_simple(release_element, "display", options.release_display)
+
+  if options.release_package_version:
+    update_simple(release_element, "package-version", options.release_package_version)
+
+def process_manifest(xmlroot, options):
+  """
+  Creates the manifest element
+  """
+  if not options.manifest:
+    return
+
+  manifest_element = xmlroot.find("./manifest")
+
+  if manifest_element is None:
+    raise Exception("Element 'manifest' is not found")
+
+  service_element = manifest_element.find("./service[@id='{0}']".format(options.manifest_id))
+
+  if service_element is None:
+    service_element = ET.SubElement(manifest_element, "service")
+    service_element.set('id', options.manifest_id)
+
+  service_element.set('name', options.manifest_service)
+  service_element.set('version', options.manifest_version)
+  if options.manifest_version_id:
+    service_element.set('version-id', options.manifest_version_id)
+
+def process_available(xmlroot, options):
+  """
+  Processes available service elements
+  """
+  if not options.available:
+    return
+
+  manifest_element = xmlroot.find("./manifest")
+  if manifest_element is None:
+    raise Exception("'manifest' element is not found")
+
+  service_element = manifest_element.find("./service[@id='{0}']".format(options.manifest_id))
+  if service_element is None:
+    raise Exception("Cannot add an available service for {0}; it's not on the manifest".format(options.manifest_id))
+
+  available_element = xmlroot.find("./available-services")
+  if available_element is None:
+    raise Exception("'available-services' is not found")
+
+  service_element = available_element.find("./service[@idref='{0}']".format(options.manifest_id))
+
+  if service_element is not None:
+    available_element.remove(service_element) 
+
+  service_element = ET.SubElement(available_element, "service")
+  service_element.set('idref', options.manifest_id)
+
+  if options.available_components:
+    components = options.available_components.split(',')
+    for component in components:
+      e = ET.SubElement(service_element, 'component')
+      e.text = component
+
+
+def process_repo(xmlroot, options):
+  """
+  Processes repository options.  This method doesn't update or create individual elements, it
+  creates the entire repo structure
+  """
+  if not options.repo:
+    return
+
+  repo_parent = xmlroot.find("./repository-info")
+  if repo_parent is None:
+    raise Exception("'repository-info' element is not found")
+
+  os_element = repo_parent.find("./os[@family='{0}']".format(options.repo_os))
+  if os_element is None:
+    os_element = ET.SubElement(repo_parent, 'os')
+    os_element.set('family', options.repo_os)
+
+  repo_element = os_element.find("./repo/[reponame='{0}']".format(options.repo_name))
+
+  if repo_element is not None:
+    os_element.remove(repo_element)
+
+  repo_element = ET.SubElement(os_element, 'repo')
+  e = ET.SubElement(repo_element, 'baseurl')
+  e.text = options.repo_url
+
+  e = ET.SubElement(repo_element, 'repoid')
+  e.text = options.repo_id
+
+  e = ET.SubElement(repo_element, 'reponame')
+  e.text = options.repo_name
+
+def validate_manifest(parser, options):
+  """
+  Validates manifest options from the command line
+  """
+  if not options.manifest:
+    return
+
+  template = "When specifying --manifest, {0} is also required"
+
+  if not options.manifest_id:
+    parser.error(template.format("--manifest-id"))
+  
+  if not options.manifest_service:
+    parser.error(template.format("--manifest-service"))
+
+  if not options.manifest_version:
+    parser.error(template.format("--manifest-version"))
+
+def validate_available(parser, options):
+  """
+  Validates available service options from the command line
+  """
+  if not options.available:
+    return
+
+  if not options.manifest_id:
+    parser.error("When specifying --available, --manifest-id is also required")
+
+def validate_repo(parser, options):
+  """
+  Validates repo options from the command line
+  """
+  if not options.repo:
+    return
+
+  template = "When specifying --repo, {0} is also required"
+
+  if not options.repo_os:
+    parser.error(template.format("--repo-os"))
+
+  if not options.repo_url:
+    parser.error(template.format("--repo-url"))
+
+  if not options.repo_id:
+    parser.error(template.format("--repo-id"))
+
+  if not options.repo_name:
+    parser.error(template.format("--repo-name"))
+
+
+def main(argv):
+  parser = optparse.OptionParser(
+    epilog="OS utility 'xmllint' is required for this tool to function.  It handles pretty-printing and XSD validation.")
+  
+  parser.add_option('--file', dest='filename',
+    help="The output XML file")
+
+  parser.add_option('--finalize', action='store_true', dest='finalize',
+    help="Finalize and validate the XML file")
+  parser.add_option('--xsd', dest='xsd_file',
+    help="The XSD location when finalizing")
+
+  parser.add_option('--release-type', type='choice', choices=['STANDARD', 'PATCH'], dest='release_type' ,
+    help="Indicate the release type: i.e. STANDARD or PATCH")
+  parser.add_option('--release-stack', dest='release_stack',
+    help="The stack id: e.g. HDP-2.4")
+  parser.add_option('--release-version', dest='release_version',
+    help="The release version without build number: e.g. 2.4.0.1")
+  parser.add_option('--release-build', dest='release_build',
+    help="The release build number: e.g. 1234")
+  parser.add_option('--release-compatible', dest='release_compatible',
+    help="Regular Expression string to identify version compatibility for patches: e.g. 2.4.1.[0-9]")
+  parser.add_option('--release-notes', dest='release_notes',
+    help="A http link to the documentation notes")
+  parser.add_option('--release-display', dest='release_display',
+    help="The display name for this release")
+  parser.add_option('--release-package-version', dest='release_package_version',
+    help="Identifier to use when installing packages, generally a part of the package name")
+
+  parser.add_option('--manifest', action='store_true', dest='manifest',
+    help="Add a manifest service with other options: --manifest-id, --manifest-service, --manifest-version, --manifest-version-id")
+  parser.add_option('--manifest-id', dest='manifest_id',
+    help="Unique ID for a service in a manifest.  Required when specifying --manifest and --available")
+  parser.add_option('--manifest-service', dest='manifest_service')
+  parser.add_option('--manifest-version', dest='manifest_version')
+  parser.add_option('--manifest-version-id', dest='manifest_version_id')
+
+  parser.add_option('--available', action='store_true', dest='available',
+    help="Add an available service with other options: --manifest-id, --available-components")
+  parser.add_option('--available-components', dest='available_components',
+    help="A CSV of service components that are intended to be upgraded via patch. \
+      Omitting this implies the entire service should be upgraded")
+
+  parser.add_option('--repo', action='store_true', dest='repo',
+    help="Add repository data with options: --repo-os, --repo-url, --repo-id, --repo-name")
+  parser.add_option('--repo-os', dest='repo_os',
+    help="The operating system type: i.e. redhat6, redhat7, debian7, ubuntu12, ubuntu14, suse11")
+  parser.add_option('--repo-url', dest='repo_url',
+    help="The base url for the repository data")
+  parser.add_option('--repo-id', dest='repo_id', help="The ID of the repo")
+  parser.add_option('--repo-name', dest='repo_name', help="The name of the repo")
+
+  (options, args) = parser.parse_args()
+
+  check_xmllint()
+
+  # validate_filename
+  if not options.filename:
+    parser.error("--file option is required")
+
+  validate_manifest(parser, options)
+  validate_available(parser, options)
+  validate_repo(parser, options)
+
+  # validate_finalize
+  if options.finalize and not options.xsd_file:
+    parser.error("Must supply XSD (--xsd) when finalizing")
+
+  # load file
+  root = load_file(options.filename)
+
+  process_release(root, options)
+  process_manifest(root, options)
+  process_available(root, options)
+  process_repo(root, options)
+
+  # save file
+  save_file(root, options.filename)
+
+  if options.finalize:
+    validate_file(options.filename, options.xsd_file)
+
+if __name__ == "__main__":
+  main(sys.argv)
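
For illustration only, here is a minimal sketch of how the tool above might be driven to add a repository block to an existing definition file. The script and XML file names (version_builder.py, version_definition.xml) and the base URL are hypothetical placeholders, the XML file is assumed to already contain the <repository-info> element that process_repo() looks up, and xmllint must be on the PATH because main() calls check_xmllint():

import subprocess
import sys

# Hypothetical invocation of the tool above; the script name, XML file name
# and URL are placeholders, not values taken from the commit.
cmd = [
    sys.executable, "version_builder.py",
    "--file", "version_definition.xml",
    "--repo",                      # build a <repo> block under <repository-info>
    "--repo-os", "redhat6",        # becomes <os family="redhat6">
    "--repo-url", "http://example.com/repo/2.4.0.0",
    "--repo-id", "HDP-2.4",
    "--repo-name", "HDP",
]
subprocess.check_call(cmd)

Note that process_repo() removes any existing <repo> whose reponame matches --repo-name and rebuilds it under the matching <os> element, so repeated invocations replace rather than merge the repository entry.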


[44/50] [abbrv] ambari git commit: AMBARI-14996. Component should support a desired version (dlysnichenko)

Posted by nc...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
index 89f0ba6..207b4c7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
@@ -156,7 +156,7 @@ public class ComponentVersionCheckActionTest {
 
     // Create the starting repo version
     m_helper.getOrCreateRepositoryVersion(sourceStack, sourceRepo);
-    c.createClusterVersion(sourceStack, sourceRepo, "admin", RepositoryVersionState.UPGRADING);
+    c.createClusterVersion(sourceStack, sourceRepo, "admin", RepositoryVersionState.INSTALLING);
     c.transitionClusterVersion(sourceStack, sourceRepo, RepositoryVersionState.CURRENT);
 
     // Create the new repo version
@@ -168,8 +168,6 @@ public class ComponentVersionCheckActionTest {
     // Start upgrading the newer repo
     c.createClusterVersion(targetStack, targetRepo, "admin", RepositoryVersionState.INSTALLING);
     c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.INSTALLED);
-    c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.UPGRADING);
-    c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.UPGRADED);
     c.setCurrentStackVersion(targetStack);
 
     c.mapHostVersions(Collections.singleton(hostName), c.getCurrentClusterVersion(),
@@ -180,7 +178,7 @@ public class ComponentVersionCheckActionTest {
     HostVersionEntity entity = new HostVersionEntity();
     entity.setHostEntity(hostDAO.findByName(hostName));
     entity.setRepositoryVersion(repoVersionDAO.findByStackAndVersion(targetStack, targetRepo));
-    entity.setState(RepositoryVersionState.UPGRADED);
+    entity.setState(RepositoryVersionState.INSTALLED);
     hostVersionDAO.create(entity);
   }
 
@@ -216,7 +214,7 @@ public class ComponentVersionCheckActionTest {
 
     // Create the starting repo version
     m_helper.getOrCreateRepositoryVersion(sourceStack, sourceRepo);
-    c.createClusterVersion(sourceStack, sourceRepo, "admin", RepositoryVersionState.UPGRADING);
+    c.createClusterVersion(sourceStack, sourceRepo, "admin", RepositoryVersionState.INSTALLING);
     c.transitionClusterVersion(sourceStack, sourceRepo, RepositoryVersionState.CURRENT);
 
     // Create the new repo version
@@ -228,8 +226,6 @@ public class ComponentVersionCheckActionTest {
     // Start upgrading the newer repo
     c.createClusterVersion(targetStack, targetRepo, "admin", RepositoryVersionState.INSTALLING);
     c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.INSTALLED);
-    c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.UPGRADING);
-    c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.UPGRADED);
 
     c.mapHostVersions(Collections.singleton(hostName), c.getCurrentClusterVersion(),
         RepositoryVersionState.CURRENT);
@@ -239,7 +235,7 @@ public class ComponentVersionCheckActionTest {
     HostVersionEntity entity = new HostVersionEntity();
     entity.setHostEntity(hostDAO.findByName(hostName));
     entity.setRepositoryVersion(repoVersionDAO.findByStackAndVersion(targetStack, targetRepo));
-    entity.setState(RepositoryVersionState.UPGRADED);
+    entity.setState(RepositoryVersionState.INSTALLED);
     hostVersionDAO.create(entity);
   }
 
@@ -316,17 +312,17 @@ public class ComponentVersionCheckActionTest {
     ClusterVersionEntity upgradingClusterVersion = clusterVersionDAO.findByClusterAndStackAndVersion(
         "c1", HDP_22_STACK, targetRepo);
 
-    upgradingClusterVersion.setState(RepositoryVersionState.UPGRADING);
+    upgradingClusterVersion.setState(RepositoryVersionState.INSTALLING);
     upgradingClusterVersion = clusterVersionDAO.merge(upgradingClusterVersion);
 
     // verify the conditions for the test are met properly
     upgradingClusterVersion = clusterVersionDAO.findByClusterAndStackAndVersion("c1", HDP_22_STACK, targetRepo);
     List<HostVersionEntity> hostVersions = hostVersionDAO.findByClusterStackAndVersion("c1", HDP_22_STACK, targetRepo);
 
-    assertEquals(RepositoryVersionState.UPGRADING, upgradingClusterVersion.getState());
+    assertEquals(RepositoryVersionState.INSTALLING, upgradingClusterVersion.getState());
     assertTrue(hostVersions.size() > 0);
     for (HostVersionEntity hostVersion : hostVersions) {
-      assertEquals(RepositoryVersionState.UPGRADED, hostVersion.getState());
+      assertEquals(RepositoryVersionState.INSTALLED, hostVersion.getState());
     }
 
     // now finalize and ensure we can transition from UPGRADING to UPGRADED

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
index 14137cc..ce87668 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
@@ -703,7 +703,7 @@ public class ConfigureActionTest {
 
     // Creating starting repo
     m_helper.getOrCreateRepositoryVersion(HDP_220_STACK, HDP_2_2_0_0);
-    c.createClusterVersion(HDP_220_STACK, HDP_2_2_0_0, "admin", RepositoryVersionState.UPGRADING);
+    c.createClusterVersion(HDP_220_STACK, HDP_2_2_0_0, "admin", RepositoryVersionState.INSTALLING);
     c.transitionClusterVersion(HDP_220_STACK, HDP_2_2_0_0, RepositoryVersionState.CURRENT);
 
     String urlInfo = "[{'repositories':["
@@ -714,8 +714,6 @@ public class ConfigureActionTest {
 
     c.createClusterVersion(HDP_220_STACK, HDP_2_2_0_1, "admin", RepositoryVersionState.INSTALLING);
     c.transitionClusterVersion(HDP_220_STACK, HDP_2_2_0_1, RepositoryVersionState.INSTALLED);
-    c.transitionClusterVersion(HDP_220_STACK, HDP_2_2_0_1, RepositoryVersionState.UPGRADING);
-    c.transitionClusterVersion(HDP_220_STACK, HDP_2_2_0_1, RepositoryVersionState.UPGRADED);
     c.setCurrentStackVersion(HDP_220_STACK);
 
     c.mapHostVersions(Collections.singleton(hostName), c.getCurrentClusterVersion(),
@@ -726,7 +724,7 @@ public class ConfigureActionTest {
     HostVersionEntity entity = new HostVersionEntity();
     entity.setHostEntity(hostDAO.findByName(hostName));
     entity.setRepositoryVersion(repoVersionDAO.findByStackAndVersion(HDP_220_STACK, HDP_2_2_0_1));
-    entity.setState(RepositoryVersionState.UPGRADED);
+    entity.setState(RepositoryVersionState.INSTALLED);
     hostVersionDAO.create(entity);
 
     // verify that our configs are there

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
index f43642c..9608b79 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
@@ -83,6 +83,7 @@ import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
+import org.junit.Ignore;
 import org.junit.Test;
 
 import com.google.gson.Gson;
@@ -196,14 +197,13 @@ public class UpgradeActionTest {
 
     // Create the starting repo version
     m_helper.getOrCreateRepositoryVersion(sourceStack, sourceRepo);
-    c.createClusterVersion(sourceStack, sourceRepo, "admin", RepositoryVersionState.UPGRADING);
+    c.createClusterVersion(sourceStack, sourceRepo, "admin", RepositoryVersionState.INSTALLING);
     c.transitionClusterVersion(sourceStack, sourceRepo, RepositoryVersionState.CURRENT);
 
     // Start upgrading the newer repo
     m_helper.getOrCreateRepositoryVersion(targetStack, targetRepo);
     c.createClusterVersion(targetStack, targetRepo, "admin", RepositoryVersionState.INSTALLING);
     c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.INSTALLED);
-    c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.UPGRADING);
 
     c.mapHostVersions(Collections.singleton(hostName), c.getCurrentClusterVersion(),
         RepositoryVersionState.CURRENT);
@@ -211,7 +211,7 @@ public class UpgradeActionTest {
     HostVersionEntity entity = new HostVersionEntity();
     entity.setHostEntity(hostDAO.findByName(hostName));
     entity.setRepositoryVersion(repoVersionDAO.findByStackAndVersion(targetStack, targetRepo));
-    entity.setState(RepositoryVersionState.UPGRADING);
+    entity.setState(RepositoryVersionState.INSTALLING);
     hostVersionDAO.create(entity);
   }
 
@@ -235,7 +235,7 @@ public class UpgradeActionTest {
 
     // Create the starting repo version
     m_helper.getOrCreateRepositoryVersion(sourceStack, sourceRepo);
-    c.createClusterVersion(sourceStack, sourceRepo, "admin", RepositoryVersionState.UPGRADING);
+    c.createClusterVersion(sourceStack, sourceRepo, "admin", RepositoryVersionState.INSTALLING);
     c.transitionClusterVersion(sourceStack, sourceRepo, RepositoryVersionState.CURRENT);
 
     // Start upgrading the mid repo
@@ -243,8 +243,6 @@ public class UpgradeActionTest {
     c.setDesiredStackVersion(midStack);
     c.createClusterVersion(midStack, midRepo, "admin", RepositoryVersionState.INSTALLING);
     c.transitionClusterVersion(midStack, midRepo, RepositoryVersionState.INSTALLED);
-    c.transitionClusterVersion(midStack, midRepo, RepositoryVersionState.UPGRADING);
-    c.transitionClusterVersion(midStack, midRepo, RepositoryVersionState.UPGRADED);
     c.transitionClusterVersion(midStack, midRepo, RepositoryVersionState.CURRENT);
 
     // Set original version as INSTALLED
@@ -257,8 +255,6 @@ public class UpgradeActionTest {
     c.setDesiredStackVersion(targetStack);
     c.createClusterVersion(targetStack, targetRepo, "admin", RepositoryVersionState.INSTALLING);
     c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.INSTALLED);
-    c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.UPGRADING);
-    c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.UPGRADED);
 
     // Create a host version for the starting repo in INSTALLED
     HostVersionEntity entitySource = new HostVersionEntity();
@@ -275,7 +271,7 @@ public class UpgradeActionTest {
     HostVersionEntity entityTarget = new HostVersionEntity();
     entityTarget.setHostEntity(hostDAO.findByName(hostName));
     entityTarget.setRepositoryVersion(repoVersionDAO.findByStackAndVersion(targetStack, targetRepo));
-    entityTarget.setState(RepositoryVersionState.UPGRADED);
+    entityTarget.setState(RepositoryVersionState.INSTALLED);
     hostVersionDAO.create(entityTarget);
   }
 
@@ -310,7 +306,7 @@ public class UpgradeActionTest {
 
     // Create the starting repo version
     m_helper.getOrCreateRepositoryVersion(sourceStack, sourceRepo);
-    c.createClusterVersion(sourceStack, sourceRepo, "admin", RepositoryVersionState.UPGRADING);
+    c.createClusterVersion(sourceStack, sourceRepo, "admin", RepositoryVersionState.INSTALLING);
     c.transitionClusterVersion(sourceStack, sourceRepo, RepositoryVersionState.CURRENT);
 
     // Create the new repo version
@@ -323,8 +319,6 @@ public class UpgradeActionTest {
     // Start upgrading the newer repo
     c.createClusterVersion(targetStack, targetRepo, "admin", RepositoryVersionState.INSTALLING);
     c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.INSTALLED);
-    c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.UPGRADING);
-    c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.UPGRADED);
     c.setCurrentStackVersion(targetStack);
 
     c.mapHostVersions(Collections.singleton(hostName), c.getCurrentClusterVersion(),
@@ -337,7 +331,7 @@ public class UpgradeActionTest {
         targetStack, targetRepo);
 
     HostVersionEntity entity = new HostVersionEntity(hostDAO.findByName(hostName),
-        repositoryVersionEntity, RepositoryVersionState.UPGRADED);
+        repositoryVersionEntity, RepositoryVersionState.INSTALLED);
 
     hostVersionDAO.create(entity);
 
@@ -346,7 +340,7 @@ public class UpgradeActionTest {
         targetStack, targetRepo);
 
     assertEquals(1, hostVersions.size());
-    assertEquals(RepositoryVersionState.UPGRADED, hostVersions.get(0).getState());
+    assertEquals(RepositoryVersionState.INSTALLED, hostVersions.get(0).getState());
   }
 
   private void makeCrossStackUpgradeCluster(StackId sourceStack, String sourceRepo, StackId targetStack, String targetRepo) throws Exception {
@@ -380,7 +374,7 @@ public class UpgradeActionTest {
 
     // Create the starting repo version
     m_helper.getOrCreateRepositoryVersion(sourceStack, sourceRepo);
-    c.createClusterVersion(sourceStack, sourceRepo, "admin", RepositoryVersionState.UPGRADING);
+    c.createClusterVersion(sourceStack, sourceRepo, "admin", RepositoryVersionState.INSTALLING);
     c.transitionClusterVersion(sourceStack, sourceRepo, RepositoryVersionState.CURRENT);
 
     // Create the new repo version
@@ -392,8 +386,6 @@ public class UpgradeActionTest {
     // Start upgrading the newer repo
     c.createClusterVersion(targetStack, targetRepo, "admin", RepositoryVersionState.INSTALLING);
     c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.INSTALLED);
-    c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.UPGRADING);
-    c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.UPGRADED);
 
     c.mapHostVersions(Collections.singleton(hostName), c.getCurrentClusterVersion(),
         RepositoryVersionState.CURRENT);
@@ -403,7 +395,7 @@ public class UpgradeActionTest {
     HostVersionEntity entity = new HostVersionEntity();
     entity.setHostEntity(hostDAO.findByName(hostName));
     entity.setRepositoryVersion(repoVersionDAO.findByStackAndVersion(targetStack, targetRepo));
-    entity.setState(RepositoryVersionState.UPGRADED);
+    entity.setState(RepositoryVersionState.INSTALLED);
     hostVersionDAO.create(entity);
   }
 
@@ -802,7 +794,7 @@ public class UpgradeActionTest {
     List<HostVersionEntity> hosts = dao.findByClusterStackAndVersion(clusterName, targetStack, targetRepo);
     assertFalse(hosts.isEmpty());
     for (HostVersionEntity hve : hosts) {
-      assertFalse(hve.getState() == RepositoryVersionState.INSTALLED);
+      assertTrue(hve.getState() == RepositoryVersionState.INSTALLED);
     }
 
     FinalizeUpgradeAction action = m_injector.getInstance(FinalizeUpgradeAction.class);
@@ -874,17 +866,17 @@ public class UpgradeActionTest {
     ClusterVersionEntity upgradingClusterVersion = clusterVersionDAO.findByClusterAndStackAndVersion(
         clusterName, HDP_22_STACK, targetRepo);
 
-    upgradingClusterVersion.setState(RepositoryVersionState.UPGRADING);
+    upgradingClusterVersion.setState(RepositoryVersionState.INSTALLING);
     upgradingClusterVersion = clusterVersionDAO.merge(upgradingClusterVersion);
 
     // verify the conditions for the test are met properly
     upgradingClusterVersion = clusterVersionDAO.findByClusterAndStackAndVersion(clusterName, HDP_22_STACK, targetRepo);
     List<HostVersionEntity> hostVersions = hostVersionDAO.findByClusterStackAndVersion(clusterName, HDP_22_STACK, targetRepo);
 
-    assertEquals(RepositoryVersionState.UPGRADING, upgradingClusterVersion.getState());
+    assertEquals(RepositoryVersionState.INSTALLING, upgradingClusterVersion.getState());
     assertTrue(hostVersions.size() > 0);
     for (HostVersionEntity hostVersion : hostVersions) {
-      assertEquals(RepositoryVersionState.UPGRADED, hostVersion.getState());
+      assertEquals(RepositoryVersionState.INSTALLED, hostVersion.getState());
     }
 
     // now finalize and ensure we can transition from UPGRADING to UPGRADED

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
index 55e7a61..0adac80 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
@@ -96,7 +96,7 @@ public class ServiceComponentTest {
     Assert.assertNotNull(cluster);
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     Service s = serviceFactory.createNew(cluster, serviceName);
     cluster.addService(s);

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
index 62d5981..1a5d4e7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
@@ -1017,7 +1017,7 @@ public class UpgradeHelperTest {
 
     c.createClusterVersion(stackId,
         c.getDesiredStackVersion().getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     for (int i = 0; i < 4; i++) {
       String hostName = "h" + (i+1);
@@ -1234,7 +1234,7 @@ public class UpgradeHelperTest {
 
     c.createClusterVersion(stackId,
         c.getDesiredStackVersion().getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     for (int i = 0; i < 2; i++) {
       String hostName = "h" + (i+1);
@@ -1314,7 +1314,7 @@ public class UpgradeHelperTest {
 
     c.createClusterVersion(stackId,
         c.getDesiredStackVersion().getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     for (int i = 0; i < 2; i++) {
       String hostName = "h" + (i+1);
@@ -1380,7 +1380,7 @@ public class UpgradeHelperTest {
 
     c.createClusterVersion(stackId,
         c.getDesiredStackVersion().getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     for (int i = 0; i < 2; i++) {
       String hostName = "h" + (i+1);

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterDeadlockTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterDeadlockTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterDeadlockTest.java
index 847de7d..0a3286c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterDeadlockTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterDeadlockTest.java
@@ -123,7 +123,7 @@ public class ClusterDeadlockTest {
     cluster = clusters.getCluster("c1");
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster.createClusterVersion(stackId,
-        stackId.getStackVersion(), "admin", RepositoryVersionState.UPGRADING);
+        stackId.getStackVersion(), "admin", RepositoryVersionState.INSTALLING);
 
     Config config1 = configFactory.createNew(cluster, "test-type1", new HashMap<String, String>(), new HashMap<String,
         Map<String, String>>());

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
index 2ffcd5d..87da21c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
@@ -32,6 +32,7 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.ConcurrentModificationException;
+import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -124,6 +125,9 @@ import junit.framework.Assert;
 
 public class ClusterTest {
 
+  private static final EnumSet<RepositoryVersionState> TERMINAL_VERSION_STATES =
+      EnumSet.of(RepositoryVersionState.CURRENT, RepositoryVersionState.INSTALLED);
+
   private Clusters clusters;
   private Cluster c1;
   private Injector injector;
@@ -256,7 +260,7 @@ public class ClusterTest {
 
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     c1.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
     c1.transitionClusterVersion(stackId, stackId.getStackVersion(),
         RepositoryVersionState.CURRENT);
 
@@ -1345,13 +1349,6 @@ public class ClusterTest {
 
     assertStateException(stackId, version, RepositoryVersionState.CURRENT,
         RepositoryVersionState.INSTALLING);
-    assertStateException(stackId, version, RepositoryVersionState.UPGRADING,
-        RepositoryVersionState.INSTALLING);
-    assertStateException(stackId, version, RepositoryVersionState.UPGRADED,
-        RepositoryVersionState.INSTALLING);
-    assertStateException(stackId, version,
-        RepositoryVersionState.UPGRADE_FAILED,
-        RepositoryVersionState.INSTALLING);
 
     c1.transitionClusterVersion(stackId, version,
         RepositoryVersionState.INSTALL_FAILED);
@@ -1362,13 +1359,6 @@ public class ClusterTest {
         RepositoryVersionState.INSTALL_FAILED);
     assertStateException(stackId, version, RepositoryVersionState.INSTALLED,
         RepositoryVersionState.INSTALL_FAILED);
-    assertStateException(stackId, version, RepositoryVersionState.UPGRADING,
-        RepositoryVersionState.INSTALL_FAILED);
-    assertStateException(stackId, version, RepositoryVersionState.UPGRADED,
-        RepositoryVersionState.INSTALL_FAILED);
-    assertStateException(stackId, version,
-        RepositoryVersionState.UPGRADE_FAILED,
-        RepositoryVersionState.INSTALL_FAILED);
     assertStateException(stackId, version, RepositoryVersionState.OUT_OF_SYNC,
         RepositoryVersionState.INSTALL_FAILED);
 
@@ -1380,10 +1370,6 @@ public class ClusterTest {
         RepositoryVersionState.INSTALLED);
     checkStackVersionState(stackId, version, RepositoryVersionState.INSTALLED);
 
-    assertStateException(stackId, version, RepositoryVersionState.CURRENT,
-        RepositoryVersionState.INSTALLED);
-    assertStateException(stackId, version,
-        RepositoryVersionState.UPGRADE_FAILED, RepositoryVersionState.INSTALLED);
     assertStateException(stackId, version,
         RepositoryVersionState.INSTALL_FAILED, RepositoryVersionState.INSTALLED);
 
@@ -1398,13 +1384,6 @@ public class ClusterTest {
     assertStateException(stackId, version,
         RepositoryVersionState.INSTALL_FAILED,
         RepositoryVersionState.OUT_OF_SYNC);
-    assertStateException(stackId, version, RepositoryVersionState.UPGRADING,
-        RepositoryVersionState.OUT_OF_SYNC);
-    assertStateException(stackId, version, RepositoryVersionState.UPGRADED,
-        RepositoryVersionState.OUT_OF_SYNC);
-    assertStateException(stackId, version,
-        RepositoryVersionState.UPGRADE_FAILED,
-        RepositoryVersionState.OUT_OF_SYNC);
 
     c1.transitionClusterVersion(stackId, version,
         RepositoryVersionState.INSTALLING);
@@ -1414,55 +1393,6 @@ public class ClusterTest {
         RepositoryVersionState.INSTALLED);
     checkStackVersionState(stackId, version, RepositoryVersionState.INSTALLED);
 
-    c1.transitionClusterVersion(stackId, version,
-        RepositoryVersionState.UPGRADING);
-    checkStackVersionState(stackId, version, RepositoryVersionState.UPGRADING);
-
-    assertStateException(stackId, version, RepositoryVersionState.CURRENT,
-        RepositoryVersionState.UPGRADING);
-    assertStateException(stackId, version, RepositoryVersionState.INSTALLED,
-        RepositoryVersionState.UPGRADING);
-    assertStateException(stackId, version,
-        RepositoryVersionState.INSTALL_FAILED, RepositoryVersionState.UPGRADING);
-    assertStateException(stackId, version, RepositoryVersionState.OUT_OF_SYNC,
-        RepositoryVersionState.UPGRADING);
-
-    c1.transitionClusterVersion(stackId, version,
-        RepositoryVersionState.UPGRADE_FAILED);
-    checkStackVersionState(stackId, version,
-        RepositoryVersionState.UPGRADE_FAILED);
-
-    assertStateException(stackId, version, RepositoryVersionState.CURRENT,
-        RepositoryVersionState.UPGRADE_FAILED);
-    assertStateException(stackId, version, RepositoryVersionState.INSTALLED,
-        RepositoryVersionState.UPGRADE_FAILED);
-    assertStateException(stackId, version,
-        RepositoryVersionState.INSTALL_FAILED,
-        RepositoryVersionState.UPGRADE_FAILED);
-    assertStateException(stackId, version, RepositoryVersionState.UPGRADED,
-        RepositoryVersionState.UPGRADE_FAILED);
-    assertStateException(stackId, version, RepositoryVersionState.OUT_OF_SYNC,
-        RepositoryVersionState.UPGRADE_FAILED);
-
-    c1.transitionClusterVersion(stackId, version,
-        RepositoryVersionState.UPGRADING);
-    checkStackVersionState(stackId, version, RepositoryVersionState.UPGRADING);
-
-    c1.transitionClusterVersion(stackId, version,
-        RepositoryVersionState.UPGRADED);
-    checkStackVersionState(stackId, version, RepositoryVersionState.UPGRADED);
-
-    assertStateException(stackId, version, RepositoryVersionState.INSTALLED,
-        RepositoryVersionState.UPGRADED);
-    assertStateException(stackId, version,
-        RepositoryVersionState.INSTALL_FAILED, RepositoryVersionState.UPGRADED);
-    assertStateException(stackId, version, RepositoryVersionState.UPGRADING,
-        RepositoryVersionState.UPGRADED);
-    assertStateException(stackId, version,
-        RepositoryVersionState.UPGRADE_FAILED, RepositoryVersionState.UPGRADED);
-    assertStateException(stackId, version, RepositoryVersionState.OUT_OF_SYNC,
-        RepositoryVersionState.UPGRADED);
-
     c1.setDesiredStackVersion(stackId);
     c1.transitionClusterVersion(stackId, version,
         RepositoryVersionState.CURRENT);
@@ -1487,9 +1417,6 @@ public class ClusterTest {
         RepositoryVersionState.INSTALLING);
     c1.transitionClusterVersion(stackId, "0.2",
         RepositoryVersionState.INSTALLED);
-    c1.transitionClusterVersion(stackId, "0.2",
-        RepositoryVersionState.UPGRADING);
-    c1.transitionClusterVersion(stackId, "0.2", RepositoryVersionState.UPGRADED);
     try {
       ClusterVersionDAOMock.failOnCurrentVersionState = true;
       c1.transitionClusterVersion(stackId, "0.2",
@@ -1709,49 +1636,6 @@ public class ClusterTest {
     checkStackVersionState(stackId, stackVersion,
         RepositoryVersionState.INSTALLED);
 
-    // Phase 2: Upgrade stack
-    hv1.setState(RepositoryVersionState.UPGRADING);
-    hostVersionDAO.merge(hv1);
-    c1.recalculateClusterVersionState(repositoryVersionEntity);
-    checkStackVersionState(stackId, stackVersion,
-        RepositoryVersionState.UPGRADING);
-
-    hv1.setState(RepositoryVersionState.UPGRADED);
-    hostVersionDAO.merge(hv1);
-    c1.recalculateClusterVersionState(repositoryVersionEntity);
-    checkStackVersionState(stackId, stackVersion,
-        RepositoryVersionState.UPGRADING);
-    // reset host1 state
-    hv1.setState(RepositoryVersionState.UPGRADING);
-
-    hv2.setState(RepositoryVersionState.UPGRADING);
-    hostVersionDAO.merge(hv2);
-    c1.recalculateClusterVersionState(repositoryVersionEntity);
-    checkStackVersionState(stackId, stackVersion,
-        RepositoryVersionState.UPGRADING);
-
-    hv2.setState(RepositoryVersionState.UPGRADE_FAILED);
-    hostVersionDAO.merge(hv2);
-    c1.recalculateClusterVersionState(repositoryVersionEntity);
-    checkStackVersionState(stackId, stackVersion,
-        RepositoryVersionState.UPGRADE_FAILED);
-    // Retry by going back to UPGRADING
-    c1.transitionClusterVersion(stackId, stackVersion,
-        RepositoryVersionState.UPGRADING);
-
-    hv2.setState(RepositoryVersionState.UPGRADED);
-    hostVersionDAO.merge(hv2);
-    c1.recalculateClusterVersionState(repositoryVersionEntity);
-    checkStackVersionState(stackId, stackVersion,
-        RepositoryVersionState.UPGRADING);
-
-    // Now both hosts are UPGRADED
-    hv1.setState(RepositoryVersionState.UPGRADED);
-    hostVersionDAO.merge(hv1);
-    c1.recalculateClusterVersionState(repositoryVersionEntity);
-    checkStackVersionState(stackId, stackVersion,
-        RepositoryVersionState.UPGRADED);
-
     // Set both hosts to CURRENT
     hv1.setState(RepositoryVersionState.CURRENT);
     hostVersionDAO.merge(hv1);
@@ -1868,11 +1752,6 @@ public class ClusterTest {
         Assert.assertNotNull(repositoryVersion);
         Assert.assertTrue(clusterVersions != null && clusterVersions.size() == 1);
 
-        // First component to report a version should cause the ClusterVersion to go to UPGRADING
-        if (versionedComponentCount == 1 && i < (hostComponentStates.size() - 1)) {
-          Assert.assertEquals(clusterVersions.iterator().next().getState(), RepositoryVersionState.UPGRADING);
-        }
-
         // Last component to report a version should cause the ClusterVersion to go to CURRENT
         if (i == hostComponentStates.size() - 1) {
           Assert.assertEquals(clusterVersions.iterator().next().getState(), RepositoryVersionState.CURRENT);
@@ -1976,28 +1855,13 @@ public class ClusterTest {
         RepositoryVersionEntity repositoryVersion = repositoryVersionDAO.findByStackAndVersion(stackId, v2);
         Assert.assertNotNull(repositoryVersion);
         Assert.assertTrue(clusterVersions != null && clusterVersions.size() == 2);
-
-        // First component to report a version should cause the ClusterVersion to go to UPGRADING
-        if (versionedComponentCount == 1 && i < (hostComponentStates.size() - 1)) {
-          cv2 = clusterVersionDAO.findByClusterAndStackAndVersion(clusterName, stackId, v2);
-          Assert.assertEquals(cv2.getState(), RepositoryVersionState.UPGRADING);
-        }
       }
     }
 
-    // Last component to report a version should still keep the ClusterVersion in UPGRADING because
-    // hosts 3 and 5 only have Ganglia and the HostVersion will remain in INSTALLED
-    cv2 = clusterVersionDAO.findByClusterAndStackAndVersion(clusterName, stackId, v2);
-    Assert.assertEquals(cv2.getState(), RepositoryVersionState.UPGRADING);
-
     Collection<HostVersionEntity> v2HostVersions = hostVersionDAO.findByClusterStackAndVersion(clusterName, stackId, v2);
     Assert.assertEquals(v2HostVersions.size(), clusters.getHostsForCluster(clusterName).size());
     for (HostVersionEntity hve : v2HostVersions) {
-      if (hve.getHostName().equals("h-3") || hve.getHostName().equals("h-5")) {
-        Assert.assertEquals(hve.getState(), RepositoryVersionState.INSTALLED);
-      } else {
-        Assert.assertEquals(hve.getState(), RepositoryVersionState.UPGRADED);
-      }
+      Assert.assertTrue(TERMINAL_VERSION_STATES.contains(hve.getState()));
     }
   }
 
@@ -2101,7 +1965,7 @@ public class ClusterTest {
 
     c1.setCurrentStackVersion(stackId);
     c1.createClusterVersion(stackId, v1, "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
     c1.transitionClusterVersion(stackId, v1, RepositoryVersionState.CURRENT);
 
     clusters.mapHostToCluster("h-1", clusterName);
@@ -2124,9 +1988,6 @@ public class ClusterTest {
     c1.createClusterVersion(stackId, v2, "admin",
         RepositoryVersionState.INSTALLING);
     c1.transitionClusterVersion(stackId, v2, RepositoryVersionState.INSTALLED);
-    c1.transitionClusterVersion(stackId, v2, RepositoryVersionState.UPGRADING);
-    c1.transitionClusterVersion(stackId, v2, RepositoryVersionState.UPGRADED);
-
     c1.transitionClusterVersion(stackId, v2, RepositoryVersionState.CURRENT);
 
     entities = hostVersionDAO.findByClusterAndHost(clusterName, "h-3");
@@ -2175,7 +2036,7 @@ public class ClusterTest {
 
     c1.setCurrentStackVersion(stackId);
     c1.createClusterVersion(stackId, v1, "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
     c1.transitionClusterVersion(stackId, v1, RepositoryVersionState.CURRENT);
 
     clusters.mapHostToCluster("h-1", clusterName);

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersDeadlockTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersDeadlockTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersDeadlockTest.java
index 7cb7679..a0a6444 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersDeadlockTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersDeadlockTest.java
@@ -105,7 +105,7 @@ public class ClustersDeadlockTest {
     cluster = clusters.getCluster(CLUSTER_NAME);
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     // install HDFS
     installService("HDFS");

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java
index daf8d12..1b0cf8e 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java
@@ -260,12 +260,12 @@ public class ClustersTest {
     cluster1.setDesiredStackVersion(stackId);
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster1.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
     cluster1.transitionClusterVersion(stackId, stackId.getStackVersion(),
         RepositoryVersionState.CURRENT);
     cluster2.setDesiredStackVersion(stackId);
     cluster2.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
     cluster2.transitionClusterVersion(stackId, stackId.getStackVersion(),
         RepositoryVersionState.CURRENT);
 
@@ -354,12 +354,12 @@ public class ClustersTest {
 
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster1.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
     cluster1.transitionClusterVersion(stackId, stackId.getStackVersion(),
         RepositoryVersionState.CURRENT);
     cluster2.setDesiredStackVersion(stackId);
     cluster2.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
     cluster2.transitionClusterVersion(stackId, stackId.getStackVersion(),
         RepositoryVersionState.CURRENT);
     clusters.addHost(h1);
@@ -394,7 +394,7 @@ public class ClustersTest {
     cluster.setCurrentStackVersion(stackId);
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
     cluster.transitionClusterVersion(stackId, stackId.getStackVersion(),
         RepositoryVersionState.CURRENT);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ConcurrentServiceConfigVersionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ConcurrentServiceConfigVersionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ConcurrentServiceConfigVersionTest.java
index 4845d6f..ff5cbe8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ConcurrentServiceConfigVersionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ConcurrentServiceConfigVersionTest.java
@@ -110,7 +110,7 @@ public class ConcurrentServiceConfigVersionTest {
     cluster = clusters.getCluster("c1");
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster.createClusterVersion(stackId,
-        stackId.getStackVersion(), "admin", RepositoryVersionState.UPGRADING);
+        stackId.getStackVersion(), "admin", RepositoryVersionState.INSTALLING);
 
     String hostName = "c6401.ambari.apache.org";
     clusters.addHost(hostName);

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ServiceComponentHostConcurrentWriteDeadlockTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ServiceComponentHostConcurrentWriteDeadlockTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ServiceComponentHostConcurrentWriteDeadlockTest.java
index 81399a1..7d2ba4d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ServiceComponentHostConcurrentWriteDeadlockTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ServiceComponentHostConcurrentWriteDeadlockTest.java
@@ -112,7 +112,7 @@ public class ServiceComponentHostConcurrentWriteDeadlockTest {
     cluster = clusters.getCluster("c1");
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster.createClusterVersion(stackId,
-        stackId.getStackVersion(), "admin", RepositoryVersionState.UPGRADING);
+        stackId.getStackVersion(), "admin", RepositoryVersionState.INSTALLING);
 
     Config config1 = configFactory.createNew(cluster, "test-type1", new HashMap<String, String>(), new HashMap<String,
         Map<String, String>>());

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/state/host/HostTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/host/HostTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/host/HostTest.java
index 4248d13..99fc0a1 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/host/HostTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/host/HostTest.java
@@ -365,7 +365,7 @@ public class HostTest {
 
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     c1.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
     Assert.assertEquals("c1", c1.getClusterName());
     Assert.assertEquals(1, c1.getClusterId());
     clusters.addHost("h1");
@@ -441,7 +441,7 @@ public class HostTest {
 
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     c1.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
     c1.setDesiredStackVersion(stackId);
     clusters.mapHostToCluster("h1", "c1");
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
index 62f2cbf..57a7391 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
@@ -133,7 +133,7 @@ public class ServiceComponentHostTest {
     Cluster c1 = clusters.getCluster(clusterName);
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     c1.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
   }
 
   @After
@@ -752,7 +752,7 @@ public class ServiceComponentHostTest {
 
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     ServiceComponentHost sch1 = createNewServiceComponentHost(cluster, "HDFS", "NAMENODE", hostName);
     ServiceComponentHost sch2 = createNewServiceComponentHost(cluster, "HDFS", "DATANODE", hostName);
@@ -977,7 +977,7 @@ public class ServiceComponentHostTest {
 
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     ServiceComponentHost sch1 = createNewServiceComponentHost(cluster, "HDFS", "NAMENODE", hostName);
     ServiceComponentHost sch2 = createNewServiceComponentHost(cluster, "HDFS", "DATANODE", hostName);
@@ -1109,7 +1109,7 @@ public class ServiceComponentHostTest {
 
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     HostEntity hostEntity = hostDAO.findByName(hostName);
     Assert.assertNotNull(hostEntity);
@@ -1153,7 +1153,7 @@ public class ServiceComponentHostTest {
 
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     HostEntity hostEntity = hostDAO.findByName(hostName);
     ServiceComponentHost sch1 = createNewServiceComponentHost(cluster, "HDFS", "NAMENODE", hostName);
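
Taken together, the replacements in this commit move these tests off the removed UPGRADING/UPGRADED/UPGRADE_FAILED states: cluster and host versions are now registered as INSTALLING, transitioned to INSTALLED, and finalized to CURRENT. As a rough illustration of just that test path (a sketch of the order the updated tests use, not the server's full RepositoryVersionState transition rules):

# Illustrative only: the happy-path order the updated tests walk a
# repository version through; not the complete server-side state machine.
TEST_STATE_FLOW = ["INSTALLING", "INSTALLED", "CURRENT"]

def next_test_state(current):
    """Next state in the simplified flow, or None once CURRENT is reached."""
    index = TEST_STATE_FLOW.index(current)
    return TEST_STATE_FLOW[index + 1] if index + 1 < len(TEST_STATE_FLOW) else None

assert next_test_state("INSTALLING") == "INSTALLED"
assert next_test_state("INSTALLED") == "CURRENT"
assert next_test_state("CURRENT") is None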


[22/50] [abbrv] ambari git commit: AMBARI-14972 - Add PK to servicecomponentdesiredstate Table To Support FK Relationships (jonathanhurley)

Posted by nc...@apache.org.
AMBARI-14972 - Add PK to servicecomponentdesiredstate Table To Support FK Relationships (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/697c309c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/697c309c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/697c309c

Branch: refs/heads/trunk
Commit: 697c309c19e04949f1bb9a746f5a5d1e87fb3e39
Parents: d3db8da
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Tue Feb 9 09:37:18 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Tue Feb 9 09:37:18 2016 -0500

----------------------------------------------------------------------
 .../dao/ServiceComponentDesiredStateDAO.java    | 57 +++++++++++--
 .../HostComponentDesiredStateEntity.java        |  2 +-
 .../orm/entities/HostComponentStateEntity.java  |  1 -
 .../ServiceComponentDesiredStateEntity.java     | 47 +++++++++--
 .../ServiceComponentDesiredStateEntityPK.java   | 84 -------------------
 .../server/state/ServiceComponentImpl.java      | 59 +++++---------
 .../svccomphost/ServiceComponentHostImpl.java   | 17 ++--
 .../server/upgrade/UpgradeCatalog150.java       | 18 +---
 .../server/upgrade/UpgradeCatalog170.java       |  9 +-
 .../server/upgrade/UpgradeCatalog200.java       |  8 +-
 .../server/upgrade/UpgradeCatalog210.java       | 69 ++++++++--------
 .../main/resources/Ambari-DDL-Derby-CREATE.sql  | 10 ++-
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |  6 +-
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |  7 +-
 .../resources/Ambari-DDL-Postgres-CREATE.sql    | 11 ++-
 .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql     | 10 ++-
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |  7 +-
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   | 12 ++-
 .../server/state/ServiceComponentTest.java      | 11 +--
 .../server/upgrade/UpgradeCatalog170Test.java   |  9 +-
 .../server/upgrade/UpgradeCatalog200Test.java   |  9 +-
 .../server/upgrade/UpgradeCatalog210Test.java   | 86 ++++++++++----------
 .../server/upgrade/UpgradeCatalogHelper.java    |  5 +-
 23 files changed, 270 insertions(+), 284 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceComponentDesiredStateDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceComponentDesiredStateDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceComponentDesiredStateDAO.java
index 341d1fd..b8c2fcc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceComponentDesiredStateDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceComponentDesiredStateDAO.java
@@ -26,7 +26,6 @@ import javax.persistence.TypedQuery;
 
 import org.apache.ambari.server.orm.RequiresSession;
 import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
-import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK;
 
 import com.google.inject.Inject;
 import com.google.inject.Provider;
@@ -38,9 +37,22 @@ public class ServiceComponentDesiredStateDAO {
   @Inject
   Provider<EntityManager> entityManagerProvider;
 
+  /**
+   * DAO utilities for dealing mostly with {@link TypedQuery} results.
+   */
+  @Inject
+  private DaoUtils daoUtils;
+
+  /**
+   * Gets a {@link ServiceComponentDesiredStateEntity} by its PK ID.
+   *
+   * @param id
+   *          the ID.
+   * @return the entity or {@code null} if it does not exist.
+   */
   @RequiresSession
-  public ServiceComponentDesiredStateEntity findByPK(ServiceComponentDesiredStateEntityPK primaryKey) {
-    return entityManagerProvider.get().find(ServiceComponentDesiredStateEntity.class, primaryKey);
+  public ServiceComponentDesiredStateEntity findById(long id) {
+    return entityManagerProvider.get().find(ServiceComponentDesiredStateEntity.class, id);
   }
 
   @RequiresSession
@@ -55,6 +67,37 @@ public class ServiceComponentDesiredStateDAO {
     return null;
   }
 
+  /**
+   * Finds a {@link ServiceComponentDesiredStateEntity} by a combination of
+   * cluster, service, and component.
+   *
+   * @param clusterId
+   *          the cluster ID
+   * @param serviceName
+   *          the service name (not {@code null})
+   * @param componentName
+   *          the component name (not {@code null})
+   */
+  @RequiresSession
+  public ServiceComponentDesiredStateEntity findByName(long clusterId, String serviceName,
+      String componentName) {
+    EntityManager entityManager = entityManagerProvider.get();
+    TypedQuery<ServiceComponentDesiredStateEntity> query = entityManager.createNamedQuery(
+        "ServiceComponentDesiredStateEntity.findByName", ServiceComponentDesiredStateEntity.class);
+
+    query.setParameter("clusterId", clusterId);
+    query.setParameter("serviceName", serviceName);
+    query.setParameter("componentName", componentName);
+
+    ServiceComponentDesiredStateEntity entity = null;
+    List<ServiceComponentDesiredStateEntity> entities = daoUtils.selectList(query);
+    if (null != entities && !entities.isEmpty()) {
+      entity = entities.get(0);
+    }
+
+    return entity;
+  }
+
   @Transactional
   public void refresh(ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntity) {
     entityManagerProvider.get().refresh(serviceComponentDesiredStateEntity);
@@ -76,8 +119,10 @@ public class ServiceComponentDesiredStateDAO {
   }
 
   @Transactional
-  public void removeByPK(ServiceComponentDesiredStateEntityPK primaryKey) {
-    ServiceComponentDesiredStateEntity entity = findByPK(primaryKey);
-    entityManagerProvider.get().remove(entity);
+  public void removeByName(long clusterId, String serviceName, String componentName) {
+    ServiceComponentDesiredStateEntity entity = findByName(clusterId, serviceName, componentName);
+    if (null != entity) {
+      entityManagerProvider.get().remove(entity);
+    }
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostComponentDesiredStateEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostComponentDesiredStateEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostComponentDesiredStateEntity.java
index b57a467..fd15200 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostComponentDesiredStateEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostComponentDesiredStateEntity.java
@@ -20,7 +20,6 @@ package org.apache.ambari.server.orm.entities;
 import static org.apache.commons.lang.StringUtils.defaultString;
 
 import javax.persistence.Basic;
-import javax.persistence.CascadeType;
 import javax.persistence.Column;
 import javax.persistence.Entity;
 import javax.persistence.EnumType;
@@ -95,6 +94,7 @@ public class HostComponentDesiredStateEntity {
   @Column(name = "admin_state", nullable = true, insertable = true, updatable = true)
   private HostComponentAdminState adminState;
 
+  @ManyToOne
   @JoinColumns({
       @JoinColumn(name = "cluster_id", referencedColumnName = "cluster_id", nullable = false),
       @JoinColumn(name = "service_name", referencedColumnName = "service_name", nullable = false),

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostComponentStateEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostComponentStateEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostComponentStateEntity.java
index f1af9b0..f92f645 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostComponentStateEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostComponentStateEntity.java
@@ -18,7 +18,6 @@
 
 package org.apache.ambari.server.orm.entities;
 
-import javax.persistence.CascadeType;
 import javax.persistence.Column;
 import javax.persistence.Entity;
 import javax.persistence.EnumType;

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
index bda2543..d2d1b42 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
@@ -20,35 +20,59 @@ package org.apache.ambari.server.orm.entities;
 
 import java.util.Collection;
 
-import javax.persistence.CascadeType;
 import javax.persistence.Column;
 import javax.persistence.Entity;
 import javax.persistence.EnumType;
 import javax.persistence.Enumerated;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
 import javax.persistence.Id;
 import javax.persistence.JoinColumn;
 import javax.persistence.JoinColumns;
 import javax.persistence.ManyToOne;
+import javax.persistence.NamedQueries;
+import javax.persistence.NamedQuery;
 import javax.persistence.OneToMany;
 import javax.persistence.OneToOne;
+import javax.persistence.Table;
+import javax.persistence.TableGenerator;
+import javax.persistence.UniqueConstraint;
 
 import org.apache.ambari.server.state.State;
 
-@javax.persistence.IdClass(ServiceComponentDesiredStateEntityPK.class)
-@javax.persistence.Table(name = "servicecomponentdesiredstate")
 @Entity
+@Table(
+    name = "servicecomponentdesiredstate",
+    uniqueConstraints = @UniqueConstraint(
+        name = "unq_scdesiredstate_name",
+        columnNames = { "component_name", "service_name", "cluster_id" }) )
+@TableGenerator(
+    name = "servicecomponentdesiredstate_id_generator",
+    table = "ambari_sequences",
+    pkColumnName = "sequence_name",
+    valueColumnName = "sequence_value",
+    pkColumnValue = "servicecomponentdesiredstate_id_seq",
+    initialValue = 0)
+@NamedQueries({
+ @NamedQuery(
+    name = "ServiceComponentDesiredStateEntity.findByName",
+    query = "SELECT scds FROM ServiceComponentDesiredStateEntity scds WHERE scds.clusterId = :clusterId AND scds.serviceName = :serviceName AND scds.componentName = :componentName") })
 public class ServiceComponentDesiredStateEntity {
 
-  @Column(name = "cluster_id", nullable = false, insertable = false, updatable = false, length = 10)
   @Id
+  @Column(name = "id", nullable = false, insertable = true, updatable = false)
+  @GeneratedValue(
+      strategy = GenerationType.TABLE,
+      generator = "servicecomponentdesiredstate_id_generator")
+  private Long id;
+
+  @Column(name = "cluster_id", nullable = false, insertable = false, updatable = false, length = 10)
   private Long clusterId;
 
   @Column(name = "service_name", nullable = false, insertable = false, updatable = false)
-  @Id
   private String serviceName;
 
   @Column(name = "component_name", nullable = false, insertable = true, updatable = true)
-  @Id
   private String componentName;
 
   @Column(name = "desired_state", nullable = false, insertable = true, updatable = true)
@@ -72,6 +96,10 @@ public class ServiceComponentDesiredStateEntity {
   @OneToMany(mappedBy = "serviceComponentDesiredStateEntity")
   private Collection<HostComponentDesiredStateEntity> hostComponentDesiredStateEntities;
 
+  public Long getId() {
+    return id;
+  }
+
   public Long getClusterId() {
     return clusterId;
   }
@@ -117,12 +145,16 @@ public class ServiceComponentDesiredStateEntity {
     if (this == o) {
       return true;
     }
+
     if (o == null || getClass() != o.getClass()) {
       return false;
     }
 
     ServiceComponentDesiredStateEntity that = (ServiceComponentDesiredStateEntity) o;
 
+    if (id != null ? !id.equals(that.id) : that.id != null) {
+      return false;
+    }
     if (clusterId != null ? !clusterId.equals(that.clusterId) : that.clusterId != null) {
       return false;
     }
@@ -144,7 +176,8 @@ public class ServiceComponentDesiredStateEntity {
 
   @Override
   public int hashCode() {
-    int result = clusterId != null ? clusterId.intValue() : 0;
+    int result = id != null ? id.hashCode() : 0;
+    result = 31 * result + (clusterId != null ? clusterId.hashCode() : 0);
     result = 31 * result + (serviceName != null ? serviceName.hashCode() : 0);
     result = 31 * result + (componentName != null ? componentName.hashCode() : 0);
     result = 31 * result + (desiredState != null ? desiredState.hashCode() : 0);
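
For a rough picture of how the new surrogate key behaves: the id column is filled by the TABLE generator from the 'servicecomponentdesiredstate_id_seq' row of ambari_sequences, while the unq_scdesiredstate_name constraint keeps the old (component_name, service_name, cluster_id) triple unique. The sketch below is illustrative only; clusterId, clusterServiceEntity and stackEntity are assumed to be available, and the DAO is the one reworked earlier in this commit.

    ServiceComponentDesiredStateEntity entity = new ServiceComponentDesiredStateEntity();
    entity.setClusterId(clusterId);                        // example wiring, not from the patch
    entity.setServiceName("HDFS");
    entity.setComponentName("DATANODE");
    entity.setDesiredState(State.INIT);
    entity.setClusterServiceEntity(clusterServiceEntity);  // assumed to be loaded already
    entity.setDesiredStack(stackEntity);                   // assumed to be loaded already

    serviceComponentDesiredStateDAO.create(entity);        // runs inside a transaction

    Long generatedId = entity.getId();   // drawn from the 'servicecomponentdesiredstate_id_seq'
                                         // row of ambari_sequences by commit time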

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntityPK.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntityPK.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntityPK.java
deleted file mode 100644
index d56e555..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntityPK.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.orm.entities;
-
-import javax.persistence.Column;
-import javax.persistence.Id;
-import java.io.Serializable;
-
-@SuppressWarnings("serial")
-public class ServiceComponentDesiredStateEntityPK implements Serializable {
-  private Long clusterId;
-
-  @Column(name = "cluster_id", nullable = false, insertable = true, updatable = true, length = 10)
-  @Id
-  public Long getClusterId() {
-    return clusterId;
-  }
-
-  public void setClusterId(Long clusterId) {
-    this.clusterId = clusterId;
-  }
-
-  private String serviceName;
-
-  @javax.persistence.Column(name = "service_name", nullable = false, insertable = false, updatable = false)
-  @Id
-  public String getServiceName() {
-    return serviceName;
-  }
-
-  public void setServiceName(String serviceName) {
-    this.serviceName = serviceName;
-  }
-
-  private String componentName;
-
-  @Id
-  @Column(name = "component_name", nullable = false, insertable = true, updatable = true)
-  public String getComponentName() {
-    return componentName;
-  }
-
-  public void setComponentName(String componentName) {
-    this.componentName = componentName;
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    if (this == o) return true;
-    if (o == null || getClass() != o.getClass()) return false;
-
-    ServiceComponentDesiredStateEntityPK that = (ServiceComponentDesiredStateEntityPK) o;
-
-    if (clusterId != null ? !clusterId.equals(that.clusterId) : that.clusterId != null) return false;
-    if (componentName != null ? !componentName.equals(that.componentName) : that.componentName != null) return false;
-    if (serviceName != null ? !serviceName.equals(that.serviceName) : that.serviceName != null) return false;
-
-    return true;
-  }
-
-  @Override
-  public int hashCode() {
-    int result = clusterId != null ? clusterId.intValue() : 0;
-    result = 31 * result + (serviceName != null ? serviceName.hashCode() : 0);
-    result = 31 * result + (componentName != null ? componentName.hashCode() : 0);
-    return result;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
index 7e1dd1d..4afc857 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
@@ -39,7 +39,6 @@ import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntity;
 import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntityPK;
 import org.apache.ambari.server.orm.entities.HostComponentStateEntity;
 import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
-import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK;
 import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.state.cluster.ClusterImpl;
 import org.slf4j.Logger;
@@ -74,8 +73,8 @@ public class ServiceComponentImpl implements ServiceComponent {
   private ServiceComponentHostFactory serviceComponentHostFactory;
   @Inject
   private AmbariMetaInfo ambariMetaInfo;
-  private ServiceComponentDesiredStateEntity desiredStateEntity;
-  private ServiceComponentDesiredStateEntityPK desiredStateEntityPK;
+
+  ServiceComponentDesiredStateEntity desiredStateEntity;
   private Map<String, ServiceComponentHost> hostComponents;
 
   /**
@@ -90,12 +89,13 @@ public class ServiceComponentImpl implements ServiceComponent {
     injector.injectMembers(this);
     clusterGlobalLock = service.getClusterGlobalLock();
     this.service = service;
-    desiredStateEntity = new ServiceComponentDesiredStateEntity(  );
+
+    desiredStateEntity = new ServiceComponentDesiredStateEntity();
     desiredStateEntity.setComponentName(componentName);
     desiredStateEntity.setDesiredState(State.INIT);
     desiredStateEntity.setServiceName(service.getName());
     desiredStateEntity.setClusterId(service.getClusterId());
-    desiredStateEntityPK = getDesiredStateEntityPK(desiredStateEntity);
+
     setDesiredStackVersion(service.getDesiredStackVersion());
 
     hostComponents = new HashMap<String, ServiceComponentHost>();
@@ -125,11 +125,12 @@ public class ServiceComponentImpl implements ServiceComponent {
     injector.injectMembers(this);
     clusterGlobalLock = service.getClusterGlobalLock();
     this.service = service;
+
     desiredStateEntity = serviceComponentDesiredStateEntity;
-    this.componentName = serviceComponentDesiredStateEntity.getComponentName();
+    componentName = serviceComponentDesiredStateEntity.getComponentName();
 
     hostComponents = new HashMap<String, ServiceComponentHost>();
-    for (HostComponentStateEntity hostComponentStateEntity : desiredStateEntity.getHostComponentStateEntities()) {
+    for (HostComponentStateEntity hostComponentStateEntity : serviceComponentDesiredStateEntity.getHostComponentStateEntities()) {
       HostComponentDesiredStateEntityPK pk = new HostComponentDesiredStateEntityPK();
       pk.setClusterId(hostComponentStateEntity.getClusterId());
       pk.setServiceName(hostComponentStateEntity.getServiceName());
@@ -167,8 +168,6 @@ public class ServiceComponentImpl implements ServiceComponent {
           + ", stackInfo=" + stackId.getStackId());
     }
 
-    desiredStateEntityPK = getDesiredStateEntityPK(desiredStateEntity);
-
     persisted = true;
   }
 
@@ -179,15 +178,7 @@ public class ServiceComponentImpl implements ServiceComponent {
 
   @Override
   public String getName() {
-    ServiceComponentDesiredStateEntity desiredStateEntity = getDesiredStateEntity();
-    if (desiredStateEntity != null) {
-      return desiredStateEntity.getComponentName();
-    } else {
-      LOG.warn("Trying to fetch a member from an entity object that may " +
-        "have been previously deleted, serviceName = " + getServiceName() + ", " +
-        "componentName = " + componentName);
-    }
-    return null;
+    return componentName;
   }
 
   @Override
@@ -372,7 +363,7 @@ public class ServiceComponentImpl implements ServiceComponent {
       ServiceComponentDesiredStateEntity desiredStateEntity = getDesiredStateEntity();
       if (desiredStateEntity != null) {
         desiredStateEntity.setDesiredState(state);
-        saveIfPersisted();
+        saveIfPersisted(desiredStateEntity);
       } else {
         LOG.warn("Setting a member on an entity object that may have been " +
           "previously deleted, serviceName = " + (service != null ? service.getName() : ""));
@@ -418,7 +409,7 @@ public class ServiceComponentImpl implements ServiceComponent {
       ServiceComponentDesiredStateEntity desiredStateEntity = getDesiredStateEntity();
       if (desiredStateEntity != null) {
         desiredStateEntity.setDesiredStack(stackEntity);
-        saveIfPersisted();
+        saveIfPersisted(desiredStateEntity);
       } else {
         LOG.warn("Setting a member on an entity object that may have been " +
           "previously deleted, serviceName = " + (service != null ? service.getName() : ""));
@@ -517,7 +508,7 @@ public class ServiceComponentImpl implements ServiceComponent {
           // service.refresh();
           persisted = true;
         } else {
-          saveIfPersisted();
+          saveIfPersisted(desiredStateEntity);
         }
       } finally {
         readWriteLock.writeLock().unlock();
@@ -536,9 +527,11 @@ public class ServiceComponentImpl implements ServiceComponent {
     pk.setServiceName(service.getName());
     ClusterServiceEntity serviceEntity = clusterServiceDAO.findByPK(pk);
 
+    ServiceComponentDesiredStateEntity desiredStateEntity = getDesiredStateEntity();
     desiredStateEntity.setClusterServiceEntity(serviceEntity);
+
     serviceComponentDesiredStateDAO.create(desiredStateEntity);
-    clusterServiceDAO.merge(serviceEntity);
+    serviceEntity = clusterServiceDAO.merge(serviceEntity);
   }
 
   @Override
@@ -547,10 +540,6 @@ public class ServiceComponentImpl implements ServiceComponent {
     readWriteLock.writeLock().lock();
     try {
       if (isPersisted()) {
-        ServiceComponentDesiredStateEntityPK pk = new ServiceComponentDesiredStateEntityPK();
-        pk.setComponentName(getName());
-        pk.setClusterId(getClusterId());
-        pk.setServiceName(getServiceName());
         serviceComponentDesiredStateDAO.refresh(getDesiredStateEntity());
       }
     } finally {
@@ -564,9 +553,9 @@ public class ServiceComponentImpl implements ServiceComponent {
    * has already been acquired from {@link #readWriteLock}.
    */
   @Transactional
-  void saveIfPersisted() {
+  void saveIfPersisted(ServiceComponentDesiredStateEntity desiredStateEntity) {
     if (isPersisted()) {
-      serviceComponentDesiredStateDAO.merge(desiredStateEntity);
+      desiredStateEntity = serviceComponentDesiredStateDAO.merge(desiredStateEntity);
     }
   }
 
@@ -732,18 +721,10 @@ public class ServiceComponentImpl implements ServiceComponent {
 
   // Refresh cached reference after every setter
   private ServiceComponentDesiredStateEntity getDesiredStateEntity() {
-    if (isPersisted()) {
-      desiredStateEntity = serviceComponentDesiredStateDAO.findByPK(desiredStateEntityPK);
+    if (!isPersisted()) {
+      return desiredStateEntity;
     }
-    return desiredStateEntity;
-  }
-
-  private ServiceComponentDesiredStateEntityPK getDesiredStateEntityPK(ServiceComponentDesiredStateEntity desiredStateEntity) {
-    ServiceComponentDesiredStateEntityPK pk = new ServiceComponentDesiredStateEntityPK();
-    pk.setClusterId(desiredStateEntity.getClusterId());
-    pk.setComponentName(desiredStateEntity.getComponentName());
-    pk.setServiceName(desiredStateEntity.getServiceName());
 
-    return pk;
+    return serviceComponentDesiredStateDAO.findById(desiredStateEntity.getId());
   }
 }
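
Since the hunks above are fragmented, here is a condensed paraphrase of how a setter flows after this refactor; it restates the code in this file rather than introducing any new Ambari API, and it omits the warn-and-skip branch for brevity.

    public void setDesiredState(State state) {
      readWriteLock.writeLock().lock();
      try {
        // Unpersisted component: returns the in-memory entity built in the constructor.
        // Persisted component:   returns a fresh copy fetched by its generated id.
        ServiceComponentDesiredStateEntity entity = getDesiredStateEntity();
        if (entity != null) {
          entity.setDesiredState(state);
          saveIfPersisted(entity);  // merges the mutated copy only when the component is persisted
        }
      } finally {
        readWriteLock.writeLock().unlock();
      }
    }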

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
index bfb6214..92828af 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
@@ -51,7 +51,6 @@ import org.apache.ambari.server.orm.entities.HostComponentStateEntity;
 import org.apache.ambari.server.orm.entities.HostEntity;
 import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
 import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
-import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK;
 import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -1453,26 +1452,26 @@ public class ServiceComponentHostImpl implements ServiceComponentHost {
 
   @Transactional
   protected void persistEntities() {
+    ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntity = serviceComponentDesiredStateDAO.findByName(
+        serviceComponent.getClusterId(), serviceComponent.getServiceName(),
+        serviceComponent.getName());
+
     HostEntity hostEntity = hostDAO.findByName(getHostName());
     hostEntity.addHostComponentStateEntity(stateEntity);
     hostEntity.addHostComponentDesiredStateEntity(desiredStateEntity);
 
-    ServiceComponentDesiredStateEntityPK dpk = new ServiceComponentDesiredStateEntityPK();
-    dpk.setClusterId(serviceComponent.getClusterId());
-    dpk.setServiceName(serviceComponent.getServiceName());
-    dpk.setComponentName(serviceComponent.getName());
-
-    ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntity = serviceComponentDesiredStateDAO.findByPK(dpk);
-    serviceComponentDesiredStateEntity.getHostComponentDesiredStateEntities().add(desiredStateEntity);
-
     desiredStateEntity.setServiceComponentDesiredStateEntity(serviceComponentDesiredStateEntity);
     desiredStateEntity.setHostEntity(hostEntity);
+
     stateEntity.setServiceComponentDesiredStateEntity(serviceComponentDesiredStateEntity);
     stateEntity.setHostEntity(hostEntity);
 
     hostComponentStateDAO.create(stateEntity);
     hostComponentDesiredStateDAO.create(desiredStateEntity);
 
+    serviceComponentDesiredStateEntity.getHostComponentDesiredStateEntities().add(
+        desiredStateEntity);
+
     HostComponentStateEntity stateEntity = hostComponentStateDAO.findByIndex(serviceComponent.getClusterId(),
       serviceComponent.getServiceName(), serviceComponent.getName(), hostEntity.getHostId());
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java
index b00b0e8..ff94795 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java
@@ -64,9 +64,7 @@ import org.apache.ambari.server.orm.entities.HostEntity;
 import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
 import org.apache.ambari.server.orm.entities.KeyValueEntity;
 import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
-import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK;
 import org.apache.ambari.server.orm.entities.StackEntity;
-import org.apache.ambari.server.orm.entities.UserEntity;
 import org.apache.ambari.server.state.HostComponentAdminState;
 import org.apache.ambari.server.state.PropertyInfo;
 import org.apache.ambari.server.state.ServiceInfo;
@@ -607,24 +605,16 @@ public class UpgradeCatalog150 extends AbstractUpgradeCatalog {
 
     List<ClusterEntity> clusterEntities = clusterDAO.findAll();
     for (final ClusterEntity clusterEntity : clusterEntities) {
-      ServiceComponentDesiredStateEntityPK pkHS = new ServiceComponentDesiredStateEntityPK();
-      pkHS.setComponentName("HISTORYSERVER");
-      pkHS.setClusterId(clusterEntity.getClusterId());
-      pkHS.setServiceName("MAPREDUCE");
-
-      ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntityHS = serviceComponentDesiredStateDAO.findByPK(pkHS);
+      ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntityHS = serviceComponentDesiredStateDAO.findByName(
+          clusterEntity.getClusterId(), "MAPREDUCE", "HISTORYSERVER");
 
       // already have historyserver
       if(serviceComponentDesiredStateEntityHS != null) {
         continue;
       }
 
-      ServiceComponentDesiredStateEntityPK pkJT = new ServiceComponentDesiredStateEntityPK();
-      pkJT.setComponentName("JOBTRACKER");
-      pkJT.setClusterId(clusterEntity.getClusterId());
-      pkJT.setServiceName("MAPREDUCE");
-
-      ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntityJT = serviceComponentDesiredStateDAO.findByPK(pkJT);
+      ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntityJT = serviceComponentDesiredStateDAO.findByName(
+          clusterEntity.getClusterId(), "MAPREDUCE", "JOBTRACKER");
 
       // no jobtracker present probably mapreduce is not installed
       if(serviceComponentDesiredStateEntityJT == null) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
index 91de82a..b3b1b70 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
@@ -86,7 +86,6 @@ import org.apache.ambari.server.orm.entities.PrivilegeEntity;
 import org.apache.ambari.server.orm.entities.ResourceEntity;
 import org.apache.ambari.server.orm.entities.ResourceTypeEntity;
 import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
-import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK;
 import org.apache.ambari.server.orm.entities.ServiceDesiredStateEntity;
 import org.apache.ambari.server.orm.entities.ServiceDesiredStateEntityPK;
 import org.apache.ambari.server.orm.entities.StackEntity;
@@ -769,11 +768,8 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
 
     List<ClusterEntity> clusterEntities = clusterDAO.findAll();
     for (final ClusterEntity clusterEntity : clusterEntities) {
-      ServiceComponentDesiredStateEntityPK pkHCATInHcatalog = new ServiceComponentDesiredStateEntityPK();
-      pkHCATInHcatalog.setComponentName(componentName);
-      pkHCATInHcatalog.setClusterId(clusterEntity.getClusterId());
-      pkHCATInHcatalog.setServiceName(serviceNameToBeDeleted);
-      ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntityToDelete = serviceComponentDesiredStateDAO.findByPK(pkHCATInHcatalog);
+      ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntityToDelete = serviceComponentDesiredStateDAO.findByName(
+          clusterEntity.getClusterId(), serviceNameToBeDeleted, componentName);
 
       if (serviceComponentDesiredStateEntityToDelete == null) {
         continue;
@@ -803,6 +799,7 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
       serviceComponentDesiredStateEntity.setDesiredStack(serviceComponentDesiredStateEntityToDelete.getDesiredStack());
       serviceComponentDesiredStateEntity.setDesiredState(serviceComponentDesiredStateEntityToDelete.getDesiredState());
       serviceComponentDesiredStateEntity.setClusterServiceEntity(clusterServiceEntity);
+      serviceComponentDesiredStateDAO.create(serviceComponentDesiredStateEntity);
 
       Iterator<HostComponentDesiredStateEntity> hostComponentDesiredStateIterator = serviceComponentDesiredStateEntityToDelete.getHostComponentDesiredStateEntities().iterator();
       Iterator<HostComponentStateEntity> hostComponentStateIterator = serviceComponentDesiredStateEntityToDelete.getHostComponentStateEntities().iterator();

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog200.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog200.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog200.java
index 70b8f9f..143f9af 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog200.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog200.java
@@ -45,7 +45,6 @@ import org.apache.ambari.server.orm.entities.ClusterServiceEntityPK;
 import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntity;
 import org.apache.ambari.server.orm.entities.HostComponentStateEntity;
 import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
-import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK;
 import org.apache.ambari.server.orm.entities.ServiceDesiredStateEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -516,11 +515,8 @@ public class UpgradeCatalog200 extends AbstractUpgradeCatalog {
           }
 
           // remove component state
-          ServiceComponentDesiredStateEntityPK primaryKey = new ServiceComponentDesiredStateEntityPK();
-          primaryKey.setClusterId(nagios.getClusterId());
-          primaryKey.setComponentName(componentDesiredState.getComponentName());
-          primaryKey.setServiceName(componentDesiredState.getServiceName());
-          componentDesiredStateDao.removeByPK(primaryKey);
+          componentDesiredStateDao.removeByName(nagios.getClusterId(),
+              componentDesiredState.getServiceName(), componentDesiredState.getComponentName());
         }
 
         // remove service state

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
index faf4b96..d97c0c1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
@@ -18,12 +18,28 @@
 
 package org.apache.ambari.server.upgrade;
 
-import com.google.gson.Gson;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import com.google.gson.JsonParser;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.MessageFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
+import java.util.regex.Matcher;
+
+import javax.persistence.EntityManager;
+import javax.persistence.Query;
+import javax.persistence.criteria.CriteriaBuilder;
+import javax.persistence.criteria.CriteriaDelete;
+import javax.persistence.criteria.Root;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
@@ -39,7 +55,6 @@ import org.apache.ambari.server.orm.entities.ArtifactEntity;
 import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntity;
 import org.apache.ambari.server.orm.entities.HostComponentStateEntity;
 import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
-import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK;
 import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -58,26 +73,12 @@ import org.eclipse.persistence.internal.databaseaccess.FieldTypeDefinition;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.persistence.EntityManager;
-import javax.persistence.Query;
-import javax.persistence.criteria.CriteriaBuilder;
-import javax.persistence.criteria.CriteriaDelete;
-import javax.persistence.criteria.Root;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.text.MessageFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.UUID;
-import java.util.regex.Matcher;
+import com.google.gson.Gson;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
 
 
 /**
@@ -1078,11 +1079,9 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
             @Override
             public void run() {
             ServiceComponentDesiredStateDAO dao = injector.getInstance(ServiceComponentDesiredStateDAO.class);
-            ServiceComponentDesiredStateEntityPK entityPK = new ServiceComponentDesiredStateEntityPK();
-            entityPK.setClusterId(cluster.getClusterId());
-            entityPK.setServiceName("STORM");
-            entityPK.setComponentName("STORM_REST_API");
-            ServiceComponentDesiredStateEntity entity = dao.findByPK(entityPK);
+              ServiceComponentDesiredStateEntity entity = dao.findByName(cluster.getClusterId(),
+                  "STORM", "STORM_REST_API");
+
             if (entity != null) {
               EntityManager em = getEntityManagerProvider().get();
               CriteriaBuilder cb = em.getCriteriaBuilder();
@@ -1227,7 +1226,7 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
             JsonObject reporting = rootJson.getAsJsonObject("reporting");
             JsonObject ok = reporting.getAsJsonObject("ok");
             JsonObject warning = reporting.getAsJsonObject("warning");
-            JsonObject critical = reporting.getAsJsonObject("critical");            
+            JsonObject critical = reporting.getAsJsonObject("critical");
 
             rootJson.remove("type");
             rootJson.remove("default_port");
@@ -1533,8 +1532,8 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
               hiveSiteAddProps.put("hive.server2.authentication.kerberos.keytab", "");
               hiveSiteAddProps.put("hive.server2.authentication.kerberos.principal", "");
             }
-            
-            
+
+
             updateConfigurationPropertiesForCluster(cluster, "hive-site", hiveSiteAddProps, hiveSiteRemoveProps, false, true);
           }
         }
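
The hunk above stops right after the CriteriaBuilder is obtained. For orientation, a minimal sketch of the JPA 2.1 criteria bulk delete that this kind of cleanup performs is shown below; the attribute names (serviceName, componentName) and the predicates are assumptions for illustration, not taken from this patch, and the javax.persistence.criteria types are the imports already moved to the top of this file.

    EntityManager em = getEntityManagerProvider().get();
    CriteriaBuilder cb = em.getCriteriaBuilder();

    // Bulk-delete host-level state rows for the retired STORM_REST_API component.
    CriteriaDelete<HostComponentStateEntity> delete =
        cb.createCriteriaDelete(HostComponentStateEntity.class);
    Root<HostComponentStateEntity> root = delete.from(HostComponentStateEntity.class);
    delete.where(cb.and(
        cb.equal(root.get("serviceName"), "STORM"),
        cb.equal(root.get("componentName"), "STORM_REST_API")));

    em.createQuery(delete).executeUpdate();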

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
index e1e2813..6c94999 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
@@ -171,14 +171,18 @@ CREATE TABLE host_version (
   PRIMARY KEY (id));
 
 CREATE TABLE servicecomponentdesiredstate (
+  id BIGINT NOT NULL,
   component_name VARCHAR(255) NOT NULL,
   cluster_id BIGINT NOT NULL,
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
-  PRIMARY KEY (component_name, cluster_id, service_name)
+  CONSTRAINT pk_servicecomponentdesiredstate PRIMARY KEY (id),
+  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name,service_name,cluster_id)
 );
 
+CREATE INDEX idx_sc_desired_state ON servicecomponentdesiredstate(component_name, service_name, cluster_id);
+
 CREATE TABLE servicedesiredstate (
   cluster_id BIGINT NOT NULL,
   desired_host_role_mapping INTEGER NOT NULL,
@@ -1039,7 +1043,9 @@ INSERT INTO ambari_sequences (sequence_name, sequence_value)
   union all
   select 'setting_id_seq', 0 FROM SYSIBM.SYSDUMMY1
   union all
-  select 'hostcomponentstate_id_seq', 0  FROM SYSIBM.SYSDUMMY1;
+  select 'hostcomponentstate_id_seq', 0  FROM SYSIBM.SYSDUMMY1
+  union all
+  select 'servicecomponentdesiredstate_id_seq', 0  FROM SYSIBM.SYSDUMMY1;
 
 INSERT INTO adminresourcetype (resource_type_id, resource_type_name)
   SELECT 1, 'AMBARI' FROM SYSIBM.SYSDUMMY1

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 46b1983..7f5eddc 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -172,14 +172,18 @@ CREATE TABLE host_version (
   PRIMARY KEY (id));
 
 CREATE TABLE servicecomponentdesiredstate (
+  id BIGINT NOT NULL,
   component_name VARCHAR(100) NOT NULL,
   cluster_id BIGINT NOT NULL,
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(100) NOT NULL,
-  PRIMARY KEY (component_name, cluster_id, service_name)
+  CONSTRAINT pk_servicecomponentdesiredstate PRIMARY KEY (id),
+  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name,service_name,cluster_id)
 );
 
+CREATE INDEX idx_sc_desired_state ON servicecomponentdesiredstate(component_name, service_name, cluster_id);
+
 CREATE TABLE servicedesiredstate (
   cluster_id BIGINT NOT NULL,
   desired_host_role_mapping INTEGER NOT NULL,

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index c320720..e3e4336 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -162,14 +162,18 @@ CREATE TABLE host_version (
   PRIMARY KEY (id));
 
 CREATE TABLE servicecomponentdesiredstate (
+  id NUMBER(19) NOT NULL,
   component_name VARCHAR2(255) NOT NULL,
   cluster_id NUMBER(19) NOT NULL,
   desired_stack_id NUMBER(19) NOT NULL,
   desired_state VARCHAR2(255) NOT NULL,
   service_name VARCHAR2(255) NOT NULL,
-  PRIMARY KEY (component_name, cluster_id, service_name)
+  CONSTRAINT pk_servicecomponentdesiredstate PRIMARY KEY (id),
+  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name,service_name,cluster_id)
 );
 
+CREATE INDEX idx_sc_desired_state ON servicecomponentdesiredstate(component_name, service_name, cluster_id);
+
 CREATE TABLE servicedesiredstate (
   cluster_id NUMBER(19) NOT NULL,
   desired_host_role_mapping NUMBER(10) NOT NULL,
@@ -994,6 +998,7 @@ INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('topology_re
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('topology_host_group_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('setting_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('hostcomponentstate_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('servicecomponentdesiredstate_id_seq', 0);
 
 INSERT INTO metainfo("metainfo_key", "metainfo_value") values ('version', '${ambariVersion}');
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index e6e6103..f5983a5 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -171,14 +171,19 @@ CREATE TABLE host_version (
   PRIMARY KEY (id));
 
 CREATE TABLE servicecomponentdesiredstate (
+  id BIGINT NOT NULL,
   component_name VARCHAR(255) NOT NULL,
   cluster_id BIGINT NOT NULL,
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
-  PRIMARY KEY (component_name, cluster_id, service_name)
+  CONSTRAINT pk_servicecomponentdesiredstate PRIMARY KEY (id),
+  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name,service_name,cluster_id)
 );
 
+CREATE INDEX idx_sc_desired_state ON servicecomponentdesiredstate(component_name, service_name, cluster_id);
+
+
 CREATE TABLE servicedesiredstate (
   cluster_id BIGINT NOT NULL,
   desired_host_role_mapping INTEGER NOT NULL,
@@ -1040,7 +1045,9 @@ INSERT INTO ambari_sequences (sequence_name, sequence_value)
   union all
   select 'setting_id_seq', 0
   union all
-  select 'hostcomponentstate_id_seq', 0;
+  select 'hostcomponentstate_id_seq', 0
+  union all
+  select 'servicecomponentdesiredstate_id_seq', 0;
 
 INSERT INTO adminresourcetype (resource_type_id, resource_type_name)
   SELECT 1, 'AMBARI'

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
index 56d0947..9d9a986 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
@@ -196,15 +196,19 @@ CREATE TABLE ambari.host_version (
 GRANT ALL PRIVILEGES ON TABLE ambari.host_version TO :username;
 
 CREATE TABLE ambari.servicecomponentdesiredstate (
+  id BIGINT NOT NULL,
   component_name VARCHAR(255) NOT NULL,
   cluster_id BIGINT NOT NULL,
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
-  PRIMARY KEY (component_name, cluster_id, service_name)
+  CONSTRAINT pk_servicecomponentdesiredstate PRIMARY KEY (id),
+  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name,service_name,cluster_id)
 );
 GRANT ALL PRIVILEGES ON TABLE ambari.servicecomponentdesiredstate TO :username;
 
+CREATE INDEX idx_sc_desired_state ON ambari.servicecomponentdesiredstate(component_name, service_name, cluster_id);
+
 CREATE TABLE ambari.servicedesiredstate (
   cluster_id BIGINT NOT NULL,
   desired_host_role_mapping INTEGER NOT NULL,
@@ -1139,7 +1143,9 @@ INSERT INTO ambari.ambari_sequences (sequence_name, sequence_value)
   union all
   select 'setting_id_seq', 0
   union all
-  select 'hostcomponentstate_id_seq', 0;
+  select 'hostcomponentstate_id_seq', 0
+  union all
+  select 'servicecomponentdesiredstate_id_seq', 0;
 
 INSERT INTO ambari.adminresourcetype (resource_type_id, resource_type_name)
   SELECT 1, 'AMBARI'

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
index 542b815..8c82afc 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
@@ -161,14 +161,18 @@ CREATE TABLE host_version (
   PRIMARY KEY (id));
 
 CREATE TABLE servicecomponentdesiredstate (
+  id NUMERIC(19) NOT NULL,
   component_name VARCHAR(255) NOT NULL,
   cluster_id NUMERIC(19) NOT NULL,
   desired_stack_id NUMERIC(19) NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
-  PRIMARY KEY (component_name, cluster_id, service_name)
+  CONSTRAINT pk_servicecomponentdesiredstate PRIMARY KEY (id),
+  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name,service_name,cluster_id)
 );
 
+CREATE INDEX idx_sc_desired_state ON servicecomponentdesiredstate(component_name, service_name, cluster_id);
+
 CREATE TABLE servicedesiredstate (
   cluster_id NUMERIC(19) NOT NULL,
   desired_host_role_mapping INTEGER NOT NULL,
@@ -991,6 +995,7 @@ INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('topology_re
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('topology_host_group_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('setting_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('hostcomponentstate_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('servicecomponentdesiredstate_id_seq', 0);
 
 insert into adminresourcetype (resource_type_id, resource_type_name)
   select 1, 'AMBARI'

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
index 885e422..e36519e 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
@@ -181,13 +181,18 @@ CREATE TABLE hoststate (
   PRIMARY KEY CLUSTERED (host_id));
 
 CREATE TABLE servicecomponentdesiredstate (
+  id BIGINT NOT NULL,
   component_name VARCHAR(255) NOT NULL,
   cluster_id BIGINT NOT NULL,
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
-  PRIMARY KEY CLUSTERED (component_name, cluster_id, service_name)
-  );
+  CONSTRAINT pk_servicecomponentdesiredstate PRIMARY KEY CLUSTERED (id),
+  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name,service_name,cluster_id)
+);
+
+CREATE NONCLUSTERED INDEX idx_sc_desired_state ON servicecomponentdesiredstate(component_name, service_name, cluster_id);
 
 CREATE TABLE servicedesiredstate (
   cluster_id BIGINT NOT NULL,
@@ -1106,7 +1111,8 @@ BEGIN TRANSACTION
     ('topology_request_id_seq', 0),
     ('topology_host_group_id_seq', 0),
     ('setting_id_seq', 0),
-    ('hostcomponentstate_id_seq', 0);
+    ('hostcomponentstate_id_seq', 0),
+    ('servicecomponentdesiredstate_id_seq', 0);
 
   insert into adminresourcetype (resource_type_id, resource_type_name)
   values

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
index ddab65d..4e8713b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
@@ -39,7 +39,6 @@ import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntityPK;
 import org.apache.ambari.server.orm.entities.HostComponentStateEntity;
 import org.apache.ambari.server.orm.entities.HostEntity;
 import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
-import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -148,14 +147,8 @@ public class ServiceComponentTest {
     ServiceComponentDesiredStateDAO serviceComponentDesiredStateDAO =
         injector.getInstance(ServiceComponentDesiredStateDAO.class);
 
-    ServiceComponentDesiredStateEntityPK primaryKey =
-        new ServiceComponentDesiredStateEntityPK();
-    primaryKey.setClusterId(cluster.getClusterId());
-    primaryKey.setComponentName(componentName);
-    primaryKey.setServiceName(serviceName);
-
-    ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntity =
-        serviceComponentDesiredStateDAO.findByPK(primaryKey);
+    ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntity = serviceComponentDesiredStateDAO.findByName(
+        cluster.getClusterId(), serviceName, componentName);
 
     ServiceComponent sc1 = serviceComponentFactory.createExisting(service,
         serviceComponentDesiredStateEntity);

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
index 6bbcab7..10e4993 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
@@ -108,7 +108,6 @@ import org.apache.ambari.server.orm.entities.PrincipalEntity;
 import org.apache.ambari.server.orm.entities.PrivilegeEntity;
 import org.apache.ambari.server.orm.entities.ResourceEntity;
 import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
-import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK;
 import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.orm.entities.UserEntity;
 import org.apache.ambari.server.orm.entities.ViewEntity;
@@ -620,11 +619,9 @@ public class UpgradeCatalog170Test {
 
     upgradeCatalog170.moveHcatalogIntoHiveService();
 
-    ServiceComponentDesiredStateEntityPK pkHCATInHive = new ServiceComponentDesiredStateEntityPK();
-    pkHCATInHive.setComponentName("HCAT");
-    pkHCATInHive.setClusterId(clusterEntity.getClusterId());
-    pkHCATInHive.setServiceName("HIVE");
-    ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntity = serviceComponentDesiredStateDAO.findByPK(pkHCATInHive);
+    ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntity = serviceComponentDesiredStateDAO.findByName(
+        clusterEntity.getClusterId(), "HIVE", "HCAT");
+
     assertNotNull(serviceComponentDesiredStateEntity);
 
     HostComponentDesiredStateEntityPK hcDesiredStateEntityPk  = new HostComponentDesiredStateEntityPK();

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog200Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog200Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog200Test.java
index 8ff23f8..dcac986 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog200Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog200Test.java
@@ -71,7 +71,6 @@ import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntityPK;
 import org.apache.ambari.server.orm.entities.HostComponentStateEntity;
 import org.apache.ambari.server.orm.entities.HostEntity;
 import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
-import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK;
 import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -639,11 +638,9 @@ public class UpgradeCatalog200Test {
     upgradeCatalogHelper.addComponent(injector, clusterEntity,
         clusterServiceEntityNagios, hostEntity, "NAGIOS_SERVER", stackEntity);
 
-    ServiceComponentDesiredStateEntityPK pkNagiosServer = new ServiceComponentDesiredStateEntityPK();
-    pkNagiosServer.setComponentName("NAGIOS_SERVER");
-    pkNagiosServer.setClusterId(clusterEntity.getClusterId());
-    pkNagiosServer.setServiceName("NAGIOS");
-    ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntity = serviceComponentDesiredStateDAO.findByPK(pkNagiosServer);
+    ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntity = serviceComponentDesiredStateDAO.findByName(
+        clusterEntity.getClusterId(), "NAGIOS", "NAGIOS_SERVER");
+
     assertNotNull(serviceComponentDesiredStateEntity);
 
     HostComponentDesiredStateEntityPK hcDesiredStateEntityPk = new HostComponentDesiredStateEntityPK();

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
index 83018a2..c641bbf 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
@@ -18,13 +18,37 @@
 
 package org.apache.ambari.server.upgrade;
 
-import com.google.inject.AbstractModule;
-import com.google.inject.Binder;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.Module;
-import com.google.inject.Provider;
-import com.google.inject.persist.PersistService;
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertNotNull;
+import static junit.framework.Assert.assertNull;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.createMockBuilder;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.eq;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.reset;
+import static org.easymock.EasyMock.verify;
+
+import java.io.File;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.net.URL;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+
+import javax.persistence.EntityManager;
+
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
@@ -46,7 +70,6 @@ import org.apache.ambari.server.orm.entities.ClusterStateEntity;
 import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntity;
 import org.apache.ambari.server.orm.entities.HostEntity;
 import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
-import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK;
 import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -66,35 +89,13 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import javax.persistence.EntityManager;
-import java.io.File;
-import java.lang.reflect.Field;
-import java.lang.reflect.Method;
-import java.net.URL;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertNotNull;
-import static junit.framework.Assert.assertNull;
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.capture;
-import static org.easymock.EasyMock.createMockBuilder;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.createStrictMock;
-import static org.easymock.EasyMock.eq;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.reset;
-import static org.easymock.EasyMock.verify;
+import com.google.inject.AbstractModule;
+import com.google.inject.Binder;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Module;
+import com.google.inject.Provider;
+import com.google.inject.persist.PersistService;
 
 /**
  * {@link org.apache.ambari.server.upgrade.UpgradeCatalog210} unit tests.
@@ -601,14 +602,15 @@ public class UpgradeCatalog210Test {
 
     Assert.assertEquals(HostComponentAdminState.INSERVICE.name(), entity.getAdminState().name());
 
+    // ensure the desired state exists
+    Assert.assertNotNull(componentDesiredStateDAO.findByName(clusterEntity.getClusterId(), "STORM",
+        "STORM_REST_API"));
+
     UpgradeCatalog210 upgradeCatalog210 = injector.getInstance(UpgradeCatalog210.class);
     upgradeCatalog210.removeStormRestApiServiceComponent();
 
-    ServiceComponentDesiredStateEntityPK entityPK = new ServiceComponentDesiredStateEntityPK();
-    entityPK.setClusterId(clusterEntity.getClusterId());
-    entityPK.setServiceName("STORM");
-    entityPK.setComponentName("STORM_REST_API");
-    Assert.assertNull(componentDesiredStateDAO.findByPK(entityPK));
+    Assert.assertNull(componentDesiredStateDAO.findByName(clusterEntity.getClusterId(), "STORM",
+        "STORM_REST_API"));
   }
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/697c309c/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalogHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalogHelper.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalogHelper.java
index 4c48972..4c11d10 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalogHelper.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalogHelper.java
@@ -170,6 +170,9 @@ public class UpgradeCatalogHelper {
   protected void addComponent(Injector injector, ClusterEntity clusterEntity,
       ClusterServiceEntity clusterServiceEntity, HostEntity hostEntity,
       String componentName, StackEntity desiredStackEntity) {
+    ServiceComponentDesiredStateDAO serviceComponentDesiredStateDAO = injector.getInstance(
+        ServiceComponentDesiredStateDAO.class);
+
     ServiceComponentDesiredStateEntity componentDesiredStateEntity = new ServiceComponentDesiredStateEntity();
     componentDesiredStateEntity.setClusterServiceEntity(clusterServiceEntity);
     componentDesiredStateEntity.setComponentName(componentName);
@@ -177,6 +180,7 @@ public class UpgradeCatalogHelper {
     componentDesiredStateEntity.setDesiredStack(desiredStackEntity);
     componentDesiredStateEntity.setClusterServiceEntity(clusterServiceEntity);
     componentDesiredStateEntity.setClusterId(clusterServiceEntity.getClusterId());
+    serviceComponentDesiredStateDAO.create(componentDesiredStateEntity);
 
     HostComponentDesiredStateDAO hostComponentDesiredStateDAO = injector.getInstance(HostComponentDesiredStateDAO.class);
     HostComponentDesiredStateEntity hostComponentDesiredStateEntity = new HostComponentDesiredStateEntity();
@@ -208,7 +212,6 @@ public class UpgradeCatalogHelper {
         componentDesiredStateEntity);
 
     ClusterServiceDAO clusterServiceDAO = injector.getInstance(ClusterServiceDAO.class);
-    ServiceComponentDesiredStateDAO serviceComponentDesiredStateDAO = injector.getInstance(ServiceComponentDesiredStateDAO.class);
     HostDAO hostDAO = injector.getInstance(HostDAO.class);
     serviceComponentDesiredStateDAO.merge(componentDesiredStateEntity);
     hostDAO.merge(hostEntity);
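
The two hunks above move both the upgrade test and the test helper onto ServiceComponentDesiredStateDAO.findByName(clusterId, serviceName, componentName), and the helper now persists the desired-state entity through the DAO before the host component state refers to it; the old ServiceComponentDesiredStateEntityPK/findByPK lookup goes away. A minimal sketch of the new lookup pattern, reusing the injector and clusterEntity already present in the test (illustrative only, not part of the patch):

ServiceComponentDesiredStateDAO dao =
    injector.getInstance(ServiceComponentDesiredStateDAO.class);

// natural-key lookup replaces building a ServiceComponentDesiredStateEntityPK by hand
ServiceComponentDesiredStateEntity entity =
    dao.findByName(clusterEntity.getClusterId(), "STORM", "STORM_REST_API");

// after removeStormRestApiServiceComponent() has run, the component should be gone
Assert.assertNull(entity);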


[05/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/abc961a1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/abc961a1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/abc961a1

Branch: refs/heads/trunk
Commit: abc961a13a15cdced7bb3678e4ac67364ba656aa
Parents: fc402eb 77ea2eb
Author: Nate Cole <nc...@hortonworks.com>
Authored: Thu Jan 21 14:20:49 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Thu Jan 21 14:20:49 2016 -0500

----------------------------------------------------------------------
 .../python/resource_management/TestScript.py    |   4 +-
 .../src/main/python/ambari_commons/firewall.py  |   6 +-
 .../libraries/functions/get_lzo_packages.py     |   5 +-
 .../libraries/functions/package_conditions.py   |  64 ++++
 .../libraries/functions/ranger_functions.py     |   2 +-
 .../libraries/functions/ranger_functions_v2.py  |   2 +-
 .../libraries/script/script.py                  |  21 +-
 .../2.1.1/services/AMBARI_METRICS/metainfo.xml  |   1 +
 .../metrics/timeline/PhoenixHBaseAccessor.java  |   2 +-
 .../aggregators/TimelineMetricReadHelper.java   |   4 +-
 .../server/checks/AbstractCheckDescriptor.java  |   4 +
 .../ambari/server/checks/CheckDescription.java  |   7 +
 .../HiveDynamicServiceDiscoveryCheck.java       |   2 +-
 .../checks/HiveMultipleMetastoreCheck.java      |   2 +-
 .../server/checks/HostMaintenanceModeCheck.java |   2 +-
 .../checks/HostsMasterMaintenanceCheck.java     |   2 +-
 .../server/checks/InstallPackagesCheck.java     |   2 +-
 ...apReduce2JobHistoryStatePreservingCheck.java |   2 +-
 .../server/checks/PreviousUpgradeCompleted.java | 144 +++++++++
 .../server/checks/RangerPasswordCheck.java      |   2 +-
 .../checks/SecondaryNamenodeDeletedCheck.java   |   2 +-
 .../checks/ServicesMaintenanceModeCheck.java    |   2 +-
 .../ServicesMapReduceDistributedCacheCheck.java |   2 +-
 .../ServicesNamenodeHighAvailabilityCheck.java  |   2 +-
 .../checks/ServicesNamenodeTruncateCheck.java   |   2 +-
 .../ServicesTezDistributedCacheCheck.java       |   2 +-
 .../checks/ServicesYarnWorkPreservingCheck.java |   2 +-
 .../server/checks/StormRestAPIDeletedCheck.java |   2 +-
 .../checks/YarnRMHighAvailabilityCheck.java     |   2 +-
 .../YarnTimelineServerStatePreservingCheck.java |   2 +-
 .../AmbariManagementControllerImpl.java         |   9 +
 .../ambari/server/orm/dao/ClusterDAO.java       |  19 ++
 .../server/orm/dao/HostRoleCommandDAO.java      |   9 +
 .../apache/ambari/server/state/ServiceImpl.java |  28 +-
 .../ambari/server/state/ServiceOsSpecific.java  |  17 +-
 .../server/topology/ClusterTopologyImpl.java    |   7 +-
 .../server/upgrade/SchemaUpgradeHelper.java     |   1 +
 .../server/upgrade/UpgradeCatalog240.java       | 298 +++++++++++++++++++
 .../0.1.0/package/scripts/metrics_monitor.py    |   4 +-
 .../0.96.0.2.0/configuration/hbase-env.xml      |   2 +-
 .../HBASE/0.96.0.2.0/package/scripts/hbase.py   |   3 +-
 .../0.96.0.2.0/package/scripts/hbase_client.py  |   4 +-
 .../0.96.0.2.0/package/scripts/hbase_master.py  |   3 +-
 .../package/scripts/hbase_regionserver.py       |   3 +-
 .../0.96.0.2.0/package/scripts/params_linux.py  |   5 -
 .../package/scripts/phoenix_queryserver.py      |   3 +-
 .../common-services/HDFS/2.1.0.2.0/metainfo.xml |   4 +
 .../HDFS/2.1.0.2.0/package/scripts/datanode.py  |   4 +-
 .../2.1.0.2.0/package/scripts/hdfs_client.py    |   4 +-
 .../2.1.0.2.0/package/scripts/journalnode.py    |   4 +-
 .../HDFS/2.1.0.2.0/package/scripts/namenode.py  |   4 +-
 .../2.1.0.2.0/package/scripts/nfsgateway.py     |   2 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |   4 -
 .../HDFS/2.1.0.2.0/package/scripts/snamenode.py |   2 +-
 .../2.1.0.2.0/package/scripts/zkfc_slave.py     |   2 +-
 .../HIVE/0.12.0.2.0/metainfo.xml                |   4 +
 .../0.12.0.2.0/package/scripts/hcat_client.py   |   2 +-
 .../0.12.0.2.0/package/scripts/hive_client.py   |   2 +-
 .../package/scripts/hive_metastore.py           |   2 +-
 .../0.12.0.2.0/package/scripts/hive_server.py   |   2 +-
 .../0.12.0.2.0/package/scripts/mysql_server.py  |   2 +-
 .../0.12.0.2.0/package/scripts/params_linux.py  |  18 +-
 .../package/scripts/params_windows.py           |   2 -
 .../package/scripts/webhcat_server.py           |   2 +-
 .../KERBEROS/1.10.3-10/metainfo.xml             |   6 +
 .../package/scripts/kerberos_client.py          |   2 +-
 .../common-services/PXF/3.0.0/metainfo.xml      |   6 +
 .../PXF/3.0.0/package/scripts/params.py         |  31 +-
 .../PXF/3.0.0/package/scripts/pxf_constants.py  |  26 ++
 .../PXF/3.0.0/package/scripts/pxf_utils.py      |  49 +++
 .../PXF/3.0.0/package/scripts/service_check.py  | 266 +++++++++++++++++
 .../SQOOP/1.4.4.2.0/metainfo.xml                |   1 +
 .../custom_actions/scripts/install_packages.py  |  17 +-
 .../stacks/HDP/2.1/services/HIVE/metainfo.xml   |   4 +
 .../stacks/HDP/2.2/services/HBASE/metainfo.xml  |   2 +
 .../stacks/HDP/2.2/services/HDFS/metainfo.xml   |   4 +
 .../stacks/HDP/2.2/services/HIVE/metainfo.xml   |   5 +
 .../stacks/HDP/2.2/services/OOZIE/metainfo.xml  |   1 +
 .../stacks/HDP/2.2/services/SQOOP/metainfo.xml  |   1 +
 .../HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml |   6 +
 .../HDP/2.2/upgrades/nonrolling-upgrade-2.4.xml |   6 +
 .../stacks/HDP/2.2/upgrades/upgrade-2.3.xml     |   3 +
 .../stacks/HDP/2.2/upgrades/upgrade-2.4.xml     |   3 +
 .../stacks/HDP/2.3/role_command_order.json      |   6 +-
 .../stacks/HDP/2.3/services/HDFS/metainfo.xml   |   3 +
 .../stacks/HDP/2.3/services/HIVE/metainfo.xml   |   7 +
 .../stacks/HDP/2.3/services/OOZIE/metainfo.xml  |   1 +
 .../stacks/HDP/2.3/upgrades/config-upgrade.xml  |  17 ++
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml |   6 +
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml |   9 +
 .../stacks/HDP/2.3/upgrades/upgrade-2.3.xml     |   5 +
 .../stacks/HDP/2.3/upgrades/upgrade-2.4.xml     |   5 +
 .../stacks/HDP/2.4/upgrades/config-upgrade.xml  |  23 ++
 .../server/checks/UpgradeCheckOrderTest.java    |  26 +-
 .../AmbariManagementControllerImplTest.java     |   5 +
 .../timeline/AMSPropertyProviderTest.java       | 179 ++++++++++-
 .../ambari/server/state/CheckHelperTest.java    |   2 +
 .../server/state/cluster/ClusterTest.java       |  19 +-
 .../topology/ClusterTopologyImplTest.java       |  28 +-
 .../server/upgrade/UpgradeCatalog240Test.java   | 130 ++++++++
 .../src/test/python/TestAmbariServer.py         |   4 +-
 .../configs/install_packages_config.json        |   4 +-
 .../stacks/2.0.6/configs/hbase_no_phx.json      |   2 +-
 .../stacks/2.0.6/configs/hbase_with_phx.json    |   2 +-
 .../python/stacks/2.3/configs/pxf_default.json  |   2 +-
 .../ams/single_host_component_metrics_h1.json   |  21 ++
 .../ams/single_host_component_metrics_h2.json   |  22 ++
 ambari-web/app/assets/test/tests.js             |  43 +++
 ambari-web/app/controllers/main/service/item.js | 176 ++++++-----
 .../app/mixins/common/configs/configs_saver.js  |   4 +
 .../mixins/common/widgets/time_range_mixin.js   |  32 +-
 ambari-web/app/router.js                        |   3 +
 ambari-web/app/utils/ajax/ajax.js               |   6 +-
 ambari-web/app/utils/ajax/ajax_queue.js         |  29 +-
 ambari-web/app/utils/helper.js                  |   2 +-
 .../app/views/common/chart/linear_time.js       |  18 +-
 .../app/views/common/custom_date_popup.js       |   7 +-
 .../app/views/common/select_custom_date_view.js |   4 +-
 ambari-web/app/views/main/service/item.js       | 120 ++++----
 ambari-web/package.json                         |   2 +-
 ambari-web/test/controllers/application_test.js |  24 +-
 .../global/background_operations_test.js        |  19 +-
 .../global/cluster_controller_test.js           |  71 ++---
 .../global/update_controller_test.js            |  23 +-
 .../global/user_settings_controller_test.js     |   4 -
 ambari-web/test/controllers/installer_test.js   |  10 -
 .../progress_controller_test.js                 |  11 +-
 .../progress_popup_controller_test.js           |  43 +--
 .../resourceManager/step3_controller_test.js    |  11 +-
 .../controllers/main/admin/kerberos_test.js     |  30 +-
 .../admin/stack_and_upgrade_controller_test.js  | 175 ++++-------
 .../main/alert_definitions_controller_test.js   |   9 +-
 .../add_alert_definition_controller_test.js     |  20 +-
 .../alerts/alert_instances_controller_test.js   |  21 +-
 .../definitions_configs_controller_test.js      |  10 +-
 .../definitions_details_controller_test.js      |  37 +--
 ...anage_alert_notifications_controller_test.js |  15 +-
 .../controllers/main/charts/heatmap_test.js     |  61 +---
 .../dashboard/config_history_controller_test.js |  26 +-
 .../main/host/add_controller_test.js            |  42 ++-
 .../test/controllers/main/host/details_test.js  | 197 ++++++------
 ambari-web/test/controllers/main/host_test.js   |  14 +-
 .../main/service/add_controller_test.js         |  75 +----
 .../main/service/info/config_test.js            |  21 +-
 .../main/service/info/summary_test.js           | 107 +------
 .../test/controllers/main/service/item_test.js  |  27 +-
 .../service/reassign/step1_controller_test.js   |  27 +-
 .../service/reassign/step4_controller_test.js   | 201 ++++++-------
 .../service/reassign/step6_controller_test.js   |  24 +-
 .../service/reassign/step7_controller_test.js   |  53 ++--
 .../test/controllers/main/service_test.js       |  31 +-
 .../controllers/main/views_controller_test.js   |   9 +-
 ambari-web/test/controllers/main_test.js        |  25 +-
 .../test/controllers/wizard/step2_test.js       |  11 +-
 .../test/controllers/wizard/step3_test.js       |  95 ++----
 .../test/controllers/wizard/step6_test.js       |   9 -
 .../test/controllers/wizard/step7_test.js       |  18 +-
 .../test/controllers/wizard/step8_test.js       |  85 ++----
 .../test/controllers/wizard/step9_test.js       |  31 +-
 ambari-web/test/controllers/wizard_test.js      |   8 +-
 ambari-web/test/helpers.js                      |  77 +++++
 .../common/table_server_view_mixin_test.js      |   1 +
 .../test/mixins/common/widget_mixin_test.js     |  91 +++---
 .../common/widgets/export_metrics_mixin_test.js |  15 +-
 .../common/widgets/time_range_mixin_test.js     |  10 +-
 .../host_components/decommissionable_test.js    |  25 +-
 .../host_components/install_component_test.js   |  97 +++---
 .../wizard/wizardProgressPageController_test.js |  24 +-
 ambari-web/test/models/cluster_states_test.js   |   2 -
 ambari-web/test/models/host_test.js             |   4 -
 ambari-web/test/router_test.js                  |  40 +--
 ambari-web/test/utils/ajax/ajax_queue_test.js   |   8 -
 ambari-web/test/utils/ajax/ajax_test.js         |  17 +-
 .../test/utils/batch_scheduled_requests_test.js |  71 -----
 .../test/utils/host_progress_popup_test.js      |   2 -
 ambari-web/test/utils/updater_test.js           | 149 +++++-----
 .../test/views/common/chart/linear_time_test.js |  16 +-
 .../common/configs/config_history_flow_test.js  |  12 +-
 .../test/views/common/controls_view_test.js     |   9 +-
 .../test/views/common/quick_link_view_test.js   |  59 ++--
 .../common/select_custom_date_view_test.js      |  49 +++
 .../common/widget/gauge_widget_view_test.js     |  24 +-
 .../common/widget/graph_widget_view_test.js     |  12 +-
 .../common/widget/number_widget_view_test.js    |  24 +-
 .../stack_upgrade/upgrade_group_view_test.js    |  16 +-
 .../stack_upgrade/upgrade_wizard_view_test.js   | 107 ++++---
 .../admin/stack_upgrade/version_view_test.js    |  85 +++---
 .../test/views/main/dashboard/widget_test.js    |  11 +-
 .../main/dashboard/widgets/namenode_cpu_test.js |  31 +-
 .../test/views/main/dashboard/widgets_test.js   |  18 +-
 .../host_component_views/datanode_view_test.js  |  15 +-
 .../decommissionable_test.js                    |  28 +-
 ambari-web/test/views/main/service/item_test.js |  16 +-
 .../views/main/service/services/ranger_test.js  |  10 -
 194 files changed, 3023 insertions(+), 2076 deletions(-)
----------------------------------------------------------------------



[07/50] [abbrv] ambari git commit: AMBARI-14804. Move Version Definition to its own endpoint (ncole)

Posted by nc...@apache.org.
AMBARI-14804. Move Version Definition to its own endpoint (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/986a5188
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/986a5188
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/986a5188

Branch: refs/heads/trunk
Commit: 986a5188334b51cf5728b4b54c2aefc48866558e
Parents: c7be26a
Author: Nate Cole <nc...@hortonworks.com>
Authored: Tue Jan 26 15:30:06 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Wed Jan 27 09:36:31 2016 -0500

----------------------------------------------------------------------
 .../resources/ResourceInstanceFactoryImpl.java  |   4 +
 .../VersionDefinitionResourceDefinition.java    |  93 ++++++
 .../api/services/VersionDefinitionService.java  |  70 ++++
 .../AbstractControllerResourceProvider.java     |   3 +-
 .../RepositoryVersionResourceProvider.java      |  93 ++----
 .../VersionDefinitionResourceProvider.java      | 331 +++++++++++++++++++
 .../ambari/server/controller/spi/Resource.java  |   4 +-
 .../server/orm/dao/RepositoryVersionDAO.java    |  16 +-
 .../orm/entities/RepositoryVersionEntity.java   |  17 +-
 .../ambari/server/state/repository/Release.java |  20 ++
 .../src/main/resources/version_definition.xsd   |   1 +
 .../RepositoryVersionResourceProviderTest.java  |  39 ++-
 12 files changed, 601 insertions(+), 90 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/986a5188/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java
index 070a505..987c11b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java
@@ -412,6 +412,10 @@ public class ResourceInstanceFactoryImpl implements ResourceInstanceFactory {
         resourceDefinition = new SimpleResourceDefinition(Resource.Type.AdminSetting, "admin-setting", "admin-settings");
         break;
 
+      case VersionDefinition:
+        resourceDefinition = new VersionDefinitionResourceDefinition();
+        break;
+
       default:
         throw new IllegalArgumentException("Unsupported resource type: " + type);
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/986a5188/ambari-server/src/main/java/org/apache/ambari/server/api/resources/VersionDefinitionResourceDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/VersionDefinitionResourceDefinition.java b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/VersionDefinitionResourceDefinition.java
new file mode 100644
index 0000000..67d9439
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/VersionDefinitionResourceDefinition.java
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.api.resources;
+
+import java.util.List;
+
+import org.apache.ambari.server.api.services.Request;
+import org.apache.ambari.server.api.util.TreeNode;
+import org.apache.ambari.server.controller.internal.RepositoryVersionResourceProvider;
+import org.apache.ambari.server.controller.internal.VersionDefinitionResourceProvider;
+import org.apache.ambari.server.controller.spi.Resource;
+
+import com.google.common.collect.Lists;
+
+/**
+ * The Resource Definition used for Version Definition files.
+ */
+public class VersionDefinitionResourceDefinition extends BaseResourceDefinition {
+  private static final String STACKS_NAME = new StackResourceDefinition().getPluralName();
+  private static final String STACK_VERSIONS_NAME = new StackVersionResourceDefinition().getPluralName();
+  private static final String REPO_VERSIONS_NAME = new RepositoryVersionResourceDefinition().getPluralName();
+
+  private static final String HREF_TEMPLATE =
+      STACKS_NAME + "/%s/" + STACK_VERSIONS_NAME + "/%s/" + REPO_VERSIONS_NAME;
+
+
+  public VersionDefinitionResourceDefinition() {
+    super(Resource.Type.VersionDefinition);
+  }
+
+  @Override
+  public String getPluralName() {
+    return "version_definitions";
+  }
+
+  @Override
+  public String getSingularName() {
+    return "version_definition";
+  }
+
+  @Override
+  public List<PostProcessor> getPostProcessors() {
+    List<PostProcessor> list = Lists.newArrayList();
+
+    list.add(new HrefPostProcessor());
+
+    return list;
+  }
+
+
+  class HrefPostProcessor extends BaseHrefPostProcessor {
+    @Override
+    public void process(Request request, TreeNode<Resource> resultNode, String href) {
+      super.process(request, resultNode, href);
+
+      Object stackNameObj = resultNode.getObject().getPropertyValue(
+          VersionDefinitionResourceProvider.VERSION_DEF_STACK_NAME);
+      Object stackVersionObj = resultNode.getObject().getPropertyValue(
+          VersionDefinitionResourceProvider.VERSION_DEF_STACK_VERSION);
+
+      if (resultNode.getObject().getType() == Resource.Type.VersionDefinition &&
+          null != stackNameObj && null != stackVersionObj &&
+          null != resultNode.getProperty("href")) {
+
+        String oldHref = resultNode.getProperty("href").toString();
+
+        String newPath = String.format(HREF_TEMPLATE, stackNameObj, stackVersionObj);
+
+        String newHref = oldHref.replace(getPluralName(), newPath);
+        newHref = newHref.replace(VersionDefinitionResourceProvider.VERSION_DEF,
+            RepositoryVersionResourceProvider.REPOSITORY_VERSION);
+
+        resultNode.setProperty("href", newHref);
+      }
+    }
+  }
+
+}
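
The HrefPostProcessor above rewrites the href of each VersionDefinition result so it points at the matching repository_versions sub-resource of the stack. Assuming the plural names pulled from the stack, stack-version and repository-version resource definitions resolve to "stacks", "versions" and "repository_versions" (those classes are not shown in this patch), a rough worked example of the rewrite:

// hypothetical input values, for illustration only
String oldHref   = "http://ambari-host:8080/api/v1/version_definitions/1";
String stackName = "HDP";
String stackVer  = "2.3";

// assumed expansion of HREF_TEMPLATE: "stacks/%s/versions/%s/repository_versions"
String newPath = String.format("stacks/%s/versions/%s/repository_versions", stackName, stackVer);

String newHref = oldHref.replace("version_definitions", newPath)
                        .replace("VersionDefinition", "RepositoryVersions");
// newHref -> "http://ambari-host:8080/api/v1/stacks/HDP/versions/2.3/repository_versions/1"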

http://git-wip-us.apache.org/repos/asf/ambari/blob/986a5188/ambari-server/src/main/java/org/apache/ambari/server/api/services/VersionDefinitionService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/VersionDefinitionService.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/VersionDefinitionService.java
new file mode 100644
index 0000000..43eb424
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/VersionDefinitionService.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services;
+
+import java.util.Collections;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+
+import org.apache.ambari.server.api.resources.ResourceInstance;
+import org.apache.ambari.server.controller.spi.Resource;
+
+@Path("/version_definitions/")
+public class VersionDefinitionService extends BaseService {
+
+  @GET
+  @Produces("text/plain")
+  public Response getServices(@Context HttpHeaders headers, @Context UriInfo ui) {
+
+    return handleRequest(headers, null, ui, Request.Type.GET,
+      createResource(null));
+  }
+
+  @GET
+  @Path("{versionId}")
+  @Produces("text/plain")
+  public Response getService(@Context HttpHeaders headers, @Context UriInfo ui,
+      @PathParam("versionId") Long versionId) {
+
+    return handleRequest(headers, null, ui, Request.Type.GET,
+      createResource(versionId));
+  }
+
+  @POST
+  @Produces("text/plain")
+  public Response createVersion(String body, @Context HttpHeaders headers, @Context UriInfo ui) {
+    return handleRequest(headers, body, ui, Request.Type.POST,
+        createResource(null));
+  }
+
+  protected ResourceInstance createResource(Long versionId) {
+    return createResource(Resource.Type.VersionDefinition,
+        Collections.singletonMap(Resource.Type.VersionDefinition,
+            null == versionId ? null : versionId.toString()));
+  }
+
+}
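
For orientation, a rough client-side sketch of exercising the new POST endpoint. The /api/v1 prefix, the host, the X-Requested-By header and the nested JSON shape are assumptions about the surrounding Ambari API conventions, not something this patch defines; the one property the resource provider (added later in this commit) does require is VersionDefinition/version_url. Authentication is omitted from the sketch.

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public class VersionDefinitionPostSketch {
  public static void main(String[] args) throws Exception {
    URL url = new URL("http://ambari-host:8080/api/v1/version_definitions/");   // prefix assumed
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");
    conn.setDoOutput(true);
    conn.setRequestProperty("X-Requested-By", "ambari");                        // assumed CSRF header

    // the definition XML is fetched server-side from this URL (illustrative URL)
    String body = "{ \"VersionDefinition\": { \"version_url\": \"http://repo.example.com/HDP-2.3.4.0-1234.xml\" } }";
    OutputStream out = conn.getOutputStream();
    out.write(body.getBytes("UTF-8"));
    out.close();

    System.out.println("HTTP " + conn.getResponseCode());
  }
}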

http://git-wip-us.apache.org/repos/asf/ambari/blob/986a5188/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java
index 61fbd8a..586924b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java
@@ -219,7 +219,8 @@ public abstract class AbstractControllerResourceProvider extends AbstractAuthori
         return new RoleAuthorizationResourceProvider(managementController);
       case UserAuthorization:
         return new UserAuthorizationResourceProvider(managementController);
-
+      case VersionDefinition:
+        return new VersionDefinitionResourceProvider();
       default:
         throw new IllegalArgumentException("Unknown type " + type);
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/986a5188/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
index 92b14b7..873733d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
@@ -17,7 +17,6 @@
  */
 package org.apache.ambari.server.controller.internal;
 
-import java.net.URL;
 import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -65,7 +64,6 @@ import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.repository.VersionDefinitionXml;
 import org.apache.ambari.server.state.stack.UpgradePack;
 import org.apache.ambari.server.state.stack.upgrade.RepositoryVersionHelper;
-import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.ObjectUtils;
 import org.apache.commons.lang.StringUtils;
 
@@ -82,7 +80,7 @@ import com.google.inject.persist.Transactional;
 public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourceProvider {
 
   // ----- Property ID constants ---------------------------------------------
-
+  public static final String REPOSITORY_VERSION                                = "RepositoryVersions";
   public static final String REPOSITORY_VERSION_ID_PROPERTY_ID                 = PropertyHelper.getPropertyId("RepositoryVersions", "id");
   public static final String REPOSITORY_VERSION_STACK_NAME_PROPERTY_ID         = PropertyHelper.getPropertyId("RepositoryVersions", "stack_name");
   public static final String REPOSITORY_VERSION_STACK_VERSION_PROPERTY_ID      = PropertyHelper.getPropertyId("RepositoryVersions", "stack_version");
@@ -92,7 +90,6 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
   public static final String SUBRESOURCE_REPOSITORIES_PROPERTY_ID              = new RepositoryResourceDefinition().getPluralName();
 
   public static final String REPOSITORY_VERSION_TYPE_PROPERTY_ID               = "RepositoryVersions/type";
-  public static final String REPOSITORY_VERSION_DEFINITION_URL                 = "RepositoryVersions/version_url";
   public static final String REPOSITORY_VERSION_RELEASE_VERSION                = "RepositoryVersions/release/version";
   public static final String REPOSITORY_VERSION_RELEASE_BUILD                  = "RepositoryVersions/release/build";
   public static final String REPOSITORY_VERSION_RELEASE_NOTES                  = "RepositoryVersions/release/notes";
@@ -113,7 +110,6 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
       REPOSITORY_VERSION_DISPLAY_NAME_PROPERTY_ID,
       REPOSITORY_VERSION_STACK_NAME_PROPERTY_ID,
       REPOSITORY_VERSION_STACK_VERSION_PROPERTY_ID,
-      REPOSITORY_VERSION_DEFINITION_URL,
       SUBRESOURCE_OPERATING_SYSTEMS_PROPERTY_ID,
       REPOSITORY_VERSION_TYPE_PROPERTY_ID,
       REPOSITORY_VERSION_RELEASE_BUILD,
@@ -192,20 +188,14 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
             REPOSITORY_VERSION_REPOSITORY_VERSION_PROPERTY_ID
           };
 
-          final RepositoryVersionEntity entity;
-          if (properties.containsKey(REPOSITORY_VERSION_DEFINITION_URL)) {
-            String definitionUrl = (String) properties.get(REPOSITORY_VERSION_DEFINITION_URL);
-
-            entity = toRepositoryVersionEntity(definitionUrl);
-          } else {
-            for (String propertyName : requiredProperties) {
-              if (properties.get(propertyName) == null) {
-                throw new AmbariException("Property " + propertyName + " should be provided");
-              }
+          for (String propertyName : requiredProperties) {
+            if (properties.get(propertyName) == null) {
+              throw new AmbariException("Property " + propertyName + " should be provided");
             }
-            entity = toRepositoryVersionEntity(properties);
           }
 
+          RepositoryVersionEntity entity = toRepositoryVersionEntity(properties);
+
           if (repositoryVersionDAO.findByDisplayName(entity.getDisplayName()) != null) {
             throw new AmbariException("Repository version with name " + entity.getDisplayName() + " already exists");
           }
@@ -213,7 +203,12 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
             throw new AmbariException("Repository version for stack " + entity.getStack() + " and version " + entity.getVersion() + " already exists");
           }
 
-          validateRepositoryVersion(entity);
+          if (!upgradePackExists(entity.getVersion())) {
+            throw new AmbariException("Stack " + entity.getStackId() + " doesn't have upgrade packages");
+          }
+
+          validateRepositoryVersion(repositoryVersionDAO, ambariMetaInfo, entity);
+
           repositoryVersionDAO.create(entity);
           notifyCreate(Resource.Type.RepositoryVersion, request);
           return null;
@@ -336,7 +331,12 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
             entity.setDisplayName(propertyMap.get(REPOSITORY_VERSION_DISPLAY_NAME_PROPERTY_ID).toString());
           }
 
-          validateRepositoryVersion(entity);
+          if (!upgradePackExists(entity.getVersion())) {
+            throw new AmbariException("Stack " + entity.getStackId() + " doesn't have upgrade packages");
+          }
+
+          validateRepositoryVersion(repositoryVersionDAO, ambariMetaInfo, entity);
+
           repositoryVersionDAO.merge(entity);
 
           //
@@ -420,20 +420,17 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
    * @param repositoryVersion repository version
    * @throws AmbariException exception with error message
    */
-  protected void validateRepositoryVersion(RepositoryVersionEntity repositoryVersion) throws AmbariException {
+  protected static void validateRepositoryVersion(RepositoryVersionDAO dao,
+      AmbariMetaInfo metaInfo, RepositoryVersionEntity repositoryVersion) throws AmbariException {
     final StackId requiredStack = new StackId(repositoryVersion.getStack());
 
     final String requiredStackName = requiredStack.getStackName();
     final String requiredStackVersion = requiredStack.getStackVersion();
     final String requiredStackId = requiredStack.getStackId();
 
-    if (!upgradePackExists(repositoryVersion.getVersion())) {
-      throw new AmbariException("Stack " + requiredStackId + " doesn't have upgrade packages");
-    }
-
     // List of all repo urls that are already added at stack
     Set<String> existingRepoUrls = new HashSet<String>();
-    List<RepositoryVersionEntity> existingRepoVersions = repositoryVersionDAO.findByStack(requiredStack);
+    List<RepositoryVersionEntity> existingRepoVersions = dao.findByStack(requiredStack);
     for (RepositoryVersionEntity existingRepoVersion : existingRepoVersions) {
       for (OperatingSystemEntity operatingSystemEntity : existingRepoVersion.getOperatingSystems()) {
         for (RepositoryEntity repositoryEntity : operatingSystemEntity.getRepositories()) {
@@ -447,9 +444,10 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
 
     // check that repositories contain only supported operating systems
     final Set<String> osSupported = new HashSet<String>();
-    for (OperatingSystemInfo osInfo: ambariMetaInfo.getOperatingSystems(requiredStackName, requiredStackVersion)) {
+    for (OperatingSystemInfo osInfo: metaInfo.getOperatingSystems(requiredStackName, requiredStackVersion)) {
       osSupported.add(osInfo.getOsType());
     }
+
     final Set<String> osRepositoryVersion = new HashSet<String>();
     for (OperatingSystemEntity os: repositoryVersion.getOperatingSystems()) {
       osRepositoryVersion.add(os.getOsType());
@@ -486,7 +484,7 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
     Collection<StackInfo> stacks = new ArrayList<StackInfo>();
 
     // Search results only in the installed stacks
-    for (Cluster cluster:clusters.get().getClusters().values()){
+    for (Cluster cluster : clusters.get().getClusters().values()){
       stacks.add(ambariMetaInfo.getStack(cluster.getCurrentStackVersion().getStackName(),
                                           cluster.getCurrentStackVersion().getStackVersion()));
     }
@@ -537,49 +535,6 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
     return entity;
   }
 
-  /**
-   * Transforms a XML version defintion to an entity
-   *
-   * @param definitionUrl the String URL for loading
-   * @return constructed entity
-   * @throws AmbariException if some properties are missing or json has incorrect structure
-   */
-  protected RepositoryVersionEntity toRepositoryVersionEntity(String definitionUrl) throws AmbariException {
-    final VersionDefinitionXml xml;
-    final String xmlString;
-    try {
-      URL url = new URL(definitionUrl);
-
-      xmlString = IOUtils.toString(url.openStream(), "UTF-8");
-
-      xml = VersionDefinitionXml.load(xmlString);
-    } catch (Exception e) {
-      String err = String.format("Could not load url from %s.  %s",
-          definitionUrl, e.getMessage());
-      throw new AmbariException(err, e);
-    }
-
-    // !!! TODO validate parsed object graph
-
-    RepositoryVersionEntity entity = new RepositoryVersionEntity();
-
-    StackId stackId = new StackId(xml.release.stackId);
-
-    StackEntity stackEntity = stackDAO.find(stackId.getStackName(), stackId.getStackVersion());
-
-    entity.setStack(stackEntity);
-    entity.setOperatingSystems(repositoryVersionHelper.serializeOperatingSystems(
-        xml.repositoryInfo.getRepositories()));
-    entity.setVersion(xml.release.version + "-" + StringUtils.stripToEmpty(xml.release.build));
-    entity.setDisplayName(stackId.getStackName() + "-" + entity.getVersion());
-    entity.setType(xml.release.repositoryType);
-    entity.setVersionUrl(definitionUrl);
-    entity.setVersionXml(xmlString);
-    entity.setVersionXsd(xml.xsdLocation);
-
-    return entity;
-  }
-
   protected StackId getStackInformationFromUrl(Map<String, Object> propertyMap) {
     if (propertyMap.containsKey(REPOSITORY_VERSION_STACK_NAME_PROPERTY_ID) && propertyMap.containsKey(REPOSITORY_VERSION_STACK_VERSION_PROPERTY_ID)) {
      return new StackId(propertyMap.get(REPOSITORY_VERSION_STACK_NAME_PROPERTY_ID).toString(), propertyMap.get(REPOSITORY_VERSION_STACK_VERSION_PROPERTY_ID).toString());
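
One design note on the hunk above: validateRepositoryVersion() becomes a static helper that is handed its RepositoryVersionDAO and AmbariMetaInfo collaborators explicitly, so the new VersionDefinitionResourceProvider (next file) can reuse the same repository-URL and operating-system checks without holding an instance of this provider; the upgrade-pack check now lives only in the create/update paths. A minimal sketch of the shared call, assuming the caller has those collaborators injected:

// entity is a RepositoryVersionEntity built elsewhere; throws AmbariException if validation fails
RepositoryVersionResourceProvider.validateRepositoryVersion(
    repositoryVersionDAO, ambariMetaInfo, entity);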

http://git-wip-us.apache.org/repos/asf/ambari/blob/986a5188/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java
new file mode 100644
index 0000000..4b0d3cc
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java
@@ -0,0 +1,331 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.controller.internal;
+
+import java.net.URL;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.StaticallyInject;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
+import org.apache.ambari.server.controller.spi.NoSuchResourceException;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.RequestStatus;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
+import org.apache.ambari.server.controller.spi.SystemException;
+import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
+import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
+import org.apache.ambari.server.orm.dao.StackDAO;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.orm.entities.StackEntity;
+import org.apache.ambari.server.security.authorization.ResourceType;
+import org.apache.ambari.server.security.authorization.RoleAuthorization;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.StackInfo;
+import org.apache.ambari.server.state.repository.VersionDefinitionXml;
+import org.apache.ambari.server.state.stack.upgrade.RepositoryVersionHelper;
+import org.apache.commons.io.IOUtils;
+
+import com.google.common.collect.Sets;
+import com.google.inject.Inject;
+import com.google.inject.Provider;
+
+/**
+ * The {@link VersionDefinitionResourceProvider} class deals with managing Version Definition
+ * files.
+ */
+@StaticallyInject
+public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourceProvider {
+
+  public static final String VERSION_DEF                             = "VersionDefinition";
+  protected static final String VERSION_DEF_ID                       = "VersionDefinition/id";
+
+  public static final String VERSION_DEF_STACK_NAME                  = "VersionDefinition/stack_name";
+  public static final String VERSION_DEF_STACK_VERSION               = "VersionDefinition/stack_version";
+
+  protected static final String VERSION_DEF_TYPE_PROPERTY_ID         = "VersionDefinition/type";
+  protected static final String VERSION_DEF_DEFINITION_URL           = "VersionDefinition/version_url";
+  protected static final String VERSION_DEF_FULL_VERSION             = "VersionDefinition/repository_version";
+  protected static final String VERSION_DEF_RELEASE_VERSION          = "VersionDefinition/release/version";
+  protected static final String VERSION_DEF_RELEASE_BUILD            = "VersionDefinition/release/build";
+  protected static final String VERSION_DEF_RELEASE_NOTES            = "VersionDefinition/release/notes";
+  protected static final String VERSION_DEF_RELEASE_COMPATIBLE_WITH  = "VersionDefinition/release/compatible_with";
+  protected static final String VERSION_DEF_AVAILABLE_SERVICES       = "VersionDefinition/services";
+
+  @Inject
+  private static RepositoryVersionDAO s_repoVersionDAO;
+
+  @Inject
+  private static Provider<AmbariMetaInfo> s_metaInfo;
+
+  @Inject
+  private static Provider<RepositoryVersionHelper> s_repoVersionHelper;
+
+  @Inject
+  private static StackDAO s_stackDAO;
+
+  /**
+   * Key property ids
+   */
+  private static final Set<String> PK_PROPERTY_IDS = Sets.newHashSet(
+      VERSION_DEF_ID,
+      VERSION_DEF_STACK_NAME,
+      VERSION_DEF_STACK_VERSION,
+      VERSION_DEF_FULL_VERSION);
+
+  /**
+   * The property ids for a version definition resource.
+   */
+  private static final Set<String> PROPERTY_IDS = Sets.newHashSet(
+      VERSION_DEF_ID,
+      VERSION_DEF_TYPE_PROPERTY_ID,
+      VERSION_DEF_DEFINITION_URL,
+      VERSION_DEF_FULL_VERSION,
+      VERSION_DEF_RELEASE_NOTES,
+      VERSION_DEF_RELEASE_COMPATIBLE_WITH,
+      VERSION_DEF_RELEASE_VERSION,
+      VERSION_DEF_RELEASE_BUILD,
+      VERSION_DEF_AVAILABLE_SERVICES);
+
+  /**
+   * The key property ids for a version definition resource.
+   */
+  private static final Map<Resource.Type, String> KEY_PROPERTY_IDS = new HashMap<Resource.Type, String>();
+
+  static {
+    KEY_PROPERTY_IDS.put(Resource.Type.VersionDefinition, VERSION_DEF_ID);
+  }
+
+  /**
+   * Constructor.
+   */
+  VersionDefinitionResourceProvider() {
+    super(PROPERTY_IDS, KEY_PROPERTY_IDS);
+
+    setRequiredCreateAuthorizations(EnumSet.of(RoleAuthorization.AMBARI_MANAGE_STACK_VERSIONS));
+
+    setRequiredGetAuthorizations(EnumSet.of(
+        RoleAuthorization.AMBARI_MANAGE_STACK_VERSIONS));
+  }
+
+  @Override
+  protected RequestStatus createResourcesAuthorized(final Request request)
+      throws SystemException,
+      UnsupportedPropertyException, ResourceAlreadyExistsException,
+      NoSuchParentResourceException {
+
+    Set<Map<String, Object>> requestProperties = request.getProperties();
+
+    if (requestProperties.size() > 1) {
+      throw new SystemException("Cannot process more than one file per request");
+    }
+
+    final Map<String, Object> properties = requestProperties.iterator().next();
+    if (!properties.containsKey(VERSION_DEF_DEFINITION_URL)) {
+      throw new SystemException(String.format("%s is required", VERSION_DEF_DEFINITION_URL));
+    }
+
+    RepositoryVersionEntity entity = createResources(new Command<RepositoryVersionEntity>() {
+      @Override
+      public RepositoryVersionEntity invoke() throws AmbariException {
+
+        String definitionUrl = (String) properties.get(VERSION_DEF_DEFINITION_URL);
+
+        RepositoryVersionEntity entity = toRepositoryVersionEntity(definitionUrl);
+
+        RepositoryVersionResourceProvider.validateRepositoryVersion(s_repoVersionDAO,
+            s_metaInfo.get(), entity);
+
+        s_repoVersionDAO.create(entity);
+
+        return entity;
+      }
+    });
+
+    notifyCreate(Resource.Type.VersionDefinition, request);
+
+    RequestStatusImpl status = new RequestStatusImpl(null,
+        Collections.singleton(toResource(entity, Collections.<String>emptySet())));
+
+    return status;
+  }
+
+  @Override
+  public Set<Resource> getResources(Request request, Predicate predicate)
+      throws SystemException, UnsupportedPropertyException,
+      NoSuchResourceException, NoSuchParentResourceException {
+
+    Set<Resource> results = new HashSet<Resource>();
+    Set<String> requestPropertyIds = getRequestPropertyIds(request, predicate);
+
+    if (null == predicate){
+      List<RepositoryVersionEntity> versions = s_repoVersionDAO.findAllDefinitions();
+
+      for (RepositoryVersionEntity entity : versions) {
+        results.add(toResource(entity, requestPropertyIds));
+      }
+
+    } else {
+      for (Map<String, Object> propertyMap : getPropertyMaps(predicate)) {
+        String id = (String) propertyMap.get(VERSION_DEF_ID);
+        if (null == id) {
+          continue;
+        }
+
+        RepositoryVersionEntity entity = s_repoVersionDAO.findByPK(Long.parseLong(id));
+        if (null != entity) {
+          results.add(toResource(entity, requestPropertyIds));
+        }
+      }
+    }
+    return results;
+  }
+
+  @Override
+  protected RequestStatus updateResourcesAuthorized(final Request request, Predicate predicate)
+    throws SystemException, UnsupportedPropertyException,
+      NoSuchResourceException, NoSuchParentResourceException {
+
+    throw new SystemException("Cannot update Version Definitions");
+  }
+
+  @Override
+  protected RequestStatus deleteResourcesAuthorized(Predicate predicate)
+      throws SystemException, UnsupportedPropertyException,
+      NoSuchResourceException, NoSuchParentResourceException {
+    throw new SystemException("Cannot delete Version Definitions");
+  }
+
+  @Override
+  protected Set<String> getPKPropertyIds() {
+    return PK_PROPERTY_IDS;
+  }
+
+  @Override
+  protected ResourceType getResourceType(Request request, Predicate predicate) {
+    return ResourceType.AMBARI;
+  }
+
+  /**
+   * Transforms an XML version definition into an entity
+   *
+   * @param definitionUrl the URL of the definition to load
+   * @return constructed entity
+   * @throws AmbariException if some properties are missing or the XML has an incorrect structure
+   */
+  protected RepositoryVersionEntity toRepositoryVersionEntity(String definitionUrl) throws AmbariException {
+    final VersionDefinitionXml xml;
+    final String xmlString;
+    try {
+      URL url = new URL(definitionUrl);
+
+      xmlString = IOUtils.toString(url.openStream(), "UTF-8");
+
+      xml = VersionDefinitionXml.load(xmlString);
+    } catch (Exception e) {
+      String err = String.format("Could not load url from %s.  %s",
+          definitionUrl, e.getMessage());
+      throw new AmbariException(err, e);
+    }
+
+    // !!! TODO validate parsed object graph
+
+    RepositoryVersionEntity entity = new RepositoryVersionEntity();
+
+    StackId stackId = new StackId(xml.release.stackId);
+
+    StackEntity stackEntity = s_stackDAO.find(stackId.getStackName(), stackId.getStackVersion());
+
+    entity.setStack(stackEntity);
+    entity.setOperatingSystems(s_repoVersionHelper.get().serializeOperatingSystems(
+        xml.repositoryInfo.getRepositories()));
+    entity.setVersion(xml.release.getFullVersion());
+    entity.setDisplayName(stackId, xml.release);
+    entity.setType(xml.release.repositoryType);
+    entity.setVersionUrl(definitionUrl);
+    entity.setVersionXml(xmlString);
+    entity.setVersionXsd(xml.xsdLocation);
+
+    return entity;
+  }
+
+  /**
+   * Convert the given {@link RepositoryVersionEntity} to a {@link Resource}.
+   *
+   * @param entity
+   *          the entity to convert.
+   * @param requestedIds
+   *          the properties that were requested or {@code null} for all.
+   * @return the resource representation of the entity (never {@code null}).
+   */
+  private Resource toResource(RepositoryVersionEntity entity, Set<String> requestedIds)
+      throws SystemException {
+
+    Resource resource = new ResourceImpl(Resource.Type.VersionDefinition);
+
+    resource.setProperty(VERSION_DEF_ID, entity.getId());
+
+    VersionDefinitionXml xml = null;
+    try {
+      xml = entity.getRepositoryXml();
+    } catch (Exception e) {
+      String msg = String.format("Could not load version definition %s", entity.getId());
+      throw new SystemException(msg, e);
+    }
+
+    StackId stackId = new StackId(xml.release.stackId);
+
+    // !!! these are needed for href
+    resource.setProperty(VERSION_DEF_STACK_NAME, stackId.getStackName());
+    resource.setProperty(VERSION_DEF_STACK_VERSION, stackId.getStackVersion());
+
+    setResourceProperty(resource, VERSION_DEF_TYPE_PROPERTY_ID, entity.getType(), requestedIds);
+    setResourceProperty(resource, VERSION_DEF_DEFINITION_URL, entity.getVersionUrl(), requestedIds);
+    setResourceProperty(resource, VERSION_DEF_FULL_VERSION, entity.getVersion(), requestedIds);
+    setResourceProperty(resource, VERSION_DEF_RELEASE_BUILD, xml.release.build, requestedIds);
+    setResourceProperty(resource, VERSION_DEF_RELEASE_COMPATIBLE_WITH, xml.release.compatibleWith, requestedIds);
+    setResourceProperty(resource, VERSION_DEF_RELEASE_NOTES, xml.release.releaseNotes, requestedIds);
+    setResourceProperty(resource, VERSION_DEF_RELEASE_VERSION, xml.release.version, requestedIds);
+
+      // !!! future do something with the manifest
+
+    if (isPropertyRequested(VERSION_DEF_AVAILABLE_SERVICES, requestedIds)) {
+      StackInfo stack = null;
+      try {
+        stack = s_metaInfo.get().getStack(stackId.getStackName(), stackId.getStackVersion());
+      } catch (AmbariException e) {
+        throw new SystemException(String.format("Could not load stack %s", stackId));
+      }
+
+      setResourceProperty(resource, VERSION_DEF_AVAILABLE_SERVICES, xml.getAvailableServices(stack), requestedIds);
+    }
+
+    return resource;
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/986a5188/ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Resource.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Resource.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Resource.java
index 9d6af76..1ccabd1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Resource.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Resource.java
@@ -150,7 +150,8 @@ public interface Resource {
     Credential,
     KerberosDescriptor,
     RoleAuthorization,
-    UserAuthorization;
+    UserAuthorization,
+    VersionDefinition;
 
     /**
      * Get the {@link Type} that corresponds to this InternalType.
@@ -262,6 +263,7 @@ public interface Resource {
     public static final Type KerberosDescriptor = InternalType.KerberosDescriptor.getType();
     public static final Type RoleAuthorization = InternalType.RoleAuthorization.getType();
     public static final Type UserAuthorization = InternalType.UserAuthorization.getType();
+    public static final Type VersionDefinition = InternalType.VersionDefinition.getType();
 
     /**
      * The type name.

http://git-wip-us.apache.org/repos/asf/ambari/blob/986a5188/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/RepositoryVersionDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/RepositoryVersionDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/RepositoryVersionDAO.java
index c37abb5..26f96e8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/RepositoryVersionDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/RepositoryVersionDAO.java
@@ -144,7 +144,7 @@ public class RepositoryVersionDAO extends CrudDAO<RepositoryVersionEntity, Long>
    * @throws AmbariException
    */
   public RepositoryVersionEntity create(StackEntity stackEntity,
-      String version, String displayName, 
+      String version, String displayName,
       String operatingSystems) throws AmbariException {
       return create(stackEntity, version, displayName, operatingSystems,
           RepositoryType.STANDARD);
@@ -163,7 +163,7 @@ public class RepositoryVersionDAO extends CrudDAO<RepositoryVersionEntity, Long>
    */
   @Transactional
   public RepositoryVersionEntity create(StackEntity stackEntity,
-      String version, String displayName, 
+      String version, String displayName,
       String operatingSystems, RepositoryType type) throws AmbariException {
 
     if (stackEntity == null || version == null || version.isEmpty()
@@ -196,4 +196,16 @@ public class RepositoryVersionDAO extends CrudDAO<RepositoryVersionEntity, Long>
     this.create(newEntity);
     return newEntity;
   }
+
+  /**
+   * Retrieves the repository versions that were loaded from a version definition file
+   *
+   * @return a list of entities, or an empty list when there are none
+   */
+  @RequiresSession
+  public List<RepositoryVersionEntity> findAllDefinitions() {
+    final TypedQuery<RepositoryVersionEntity> query = entityManagerProvider.get().createNamedQuery(
+        "repositoryVersionsFromDefinition", RepositoryVersionEntity.class);
+    return daoUtils.selectList(query);
+  }
 }
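
The new findAllDefinitions() above runs the repositoryVersionsFromDefinition named query (defined on the entity in the next file), which selects only rows whose versionXsd column is set; since only the definition-file path stores xml.xsdLocation via setVersionXsd, repository versions created the pre-existing way are filtered out. A minimal usage sketch, assuming an injected RepositoryVersionDAO:

// every entity returned here was created from a version definition file
List<RepositoryVersionEntity> fromDefinitions = repositoryVersionDAO.findAllDefinitions();
for (RepositoryVersionEntity entity : fromDefinitions) {
  System.out.println(entity.getId() + " -> " + entity.getVersion());
}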

http://git-wip-us.apache.org/repos/asf/ambari/blob/986a5188/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
index e2e455b..fa2f905 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
@@ -44,6 +44,7 @@ import javax.persistence.UniqueConstraint;
 import org.apache.ambari.server.StaticallyInject;
 import org.apache.ambari.server.state.RepositoryType;
 import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.repository.Release;
 import org.apache.ambari.server.state.repository.VersionDefinitionXml;
 import org.apache.ambari.server.state.stack.upgrade.RepositoryVersionHelper;
 import org.apache.commons.lang.StringUtils;
@@ -68,7 +69,8 @@ import com.google.inject.Provider;
 @NamedQueries({
     @NamedQuery(name = "repositoryVersionByDisplayName", query = "SELECT repoversion FROM RepositoryVersionEntity repoversion WHERE repoversion.displayName=:displayname"),
     @NamedQuery(name = "repositoryVersionByStack", query = "SELECT repoversion FROM RepositoryVersionEntity repoversion WHERE repoversion.stack.stackName=:stackName AND repoversion.stack.stackVersion=:stackVersion"),
-        @NamedQuery(name = "repositoryVersionByStackNameAndVersion", query = "SELECT repoversion FROM RepositoryVersionEntity repoversion WHERE repoversion.stack.stackName=:stackName AND repoversion.version=:version")
+    @NamedQuery(name = "repositoryVersionByStackNameAndVersion", query = "SELECT repoversion FROM RepositoryVersionEntity repoversion WHERE repoversion.stack.stackName=:stackName AND repoversion.version=:version"),
+    @NamedQuery(name = "repositoryVersionsFromDefinition", query = "SELECT repoversion FROM RepositoryVersionEntity repoversion WHERE repoversion.versionXsd IS NOT NULL")
 })
 @StaticallyInject
 public class RepositoryVersionEntity {
@@ -195,6 +197,19 @@ public class RepositoryVersionEntity {
     this.displayName = displayName;
   }
 
+  /**
+   * @param stackId the stack id for the version
+   * @param release the XML release instance
+   */
+  public void setDisplayName(StackId stackId, Release release) {
+    if (StringUtils.isNotBlank(release.display)) {
+      displayName = release.display;
+    } else {
+      displayName = stackId.getStackName() + "-" + release.getFullVersion();
+    }
+  }
+
+
   public String getOperatingSystemsJson() {
     return operatingSystems;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/986a5188/ambari-server/src/main/java/org/apache/ambari/server/state/repository/Release.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/repository/Release.java b/ambari-server/src/main/java/org/apache/ambari/server/state/repository/Release.java
index 450fd95..6bcedf5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/repository/Release.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/repository/Release.java
@@ -23,6 +23,7 @@ import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
 
 import org.apache.ambari.server.state.RepositoryType;
+import org.apache.commons.lang.StringUtils;
 
 /**
  * Release information for a repository.
@@ -67,4 +68,23 @@ public class Release {
   @XmlElement(name="release-notes")
   public String releaseNotes;
 
+  /**
+   * The optional display name
+   */
+  @XmlElement(name="display")
+  public String display;
+
+  /**
+   * @return the full version, with the build number appended when one is present
+   */
+  public String getFullVersion() {
+    StringBuilder sb = new StringBuilder(version);
+
+    if (StringUtils.isNotBlank(build)) {
+      sb.append('-').append(StringUtils.trim(build));
+    }
+
+    return sb.toString();
+  }
+
 }
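
To make the new display element and getFullVersion() behavior concrete, a small illustrative snippet follows (not part of the patch); the sample version and build values are invented, and it assumes the existing version and build fields on Release are public like the other XML-bound fields.

import org.apache.ambari.server.state.repository.Release;

public class FullVersionExample {
  public static void main(String[] args) {
    Release release = new Release();
    release.version = "2.3.4.0";   // invented sample value
    release.build = " 5678 ";      // trimmed before being appended

    // Prints "2.3.4.0-5678"; with a blank build it would print just "2.3.4.0".
    System.out.println(release.getFullVersion());

    // RepositoryVersionEntity.setDisplayName(stackId, release) falls back to
    // "<stackName>-<fullVersion>" (e.g. "HDP-2.3.4.0-5678") when release.display is blank.
  }
}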

http://git-wip-us.apache.org/repos/asf/ambari/blob/986a5188/ambari-server/src/main/resources/version_definition.xsd
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/version_definition.xsd b/ambari-server/src/main/resources/version_definition.xsd
index 654ea08..2efdd77 100644
--- a/ambari-server/src/main/resources/version_definition.xsd
+++ b/ambari-server/src/main/resources/version_definition.xsd
@@ -33,6 +33,7 @@
      <xs:element name="build" type="xs:string" />
      <xs:element name="compatible-with" type="xs:string" minOccurs="0" maxOccurs="1" />
      <xs:element name="release-notes" type="xs:string" maxOccurs="1" />
+     <xs:element name="display" type="xs:string" minOccurs="0" />
     </xs:all>
   </xs:complexType>
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/986a5188/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProviderTest.java
index 12d2091..f3cf954 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProviderTest.java
@@ -33,6 +33,7 @@ import org.apache.ambari.server.controller.ResourceProviderFactory;
 import org.apache.ambari.server.controller.predicate.AndPredicate;
 import org.apache.ambari.server.controller.spi.Predicate;
 import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.RequestStatus;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.ResourceProvider;
 import org.apache.ambari.server.controller.utilities.PredicateBuilder;
@@ -283,12 +284,12 @@ public class RepositoryVersionResourceProviderTest {
 
     File file = new File("src/test/resources/version_definition_resource_provider.xml");
 
+    final ResourceProvider versionProvider = new VersionDefinitionResourceProvider();
     final ResourceProvider provider = injector.getInstance(ResourceProviderFactory.class).getRepositoryVersionResourceProvider();
 
     final Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
     final Map<String, Object> properties = new LinkedHashMap<String, Object>();
-    properties.put(RepositoryVersionResourceProvider.REPOSITORY_VERSION_DISPLAY_NAME_PROPERTY_ID, "name");
-    properties.put(RepositoryVersionResourceProvider.REPOSITORY_VERSION_DEFINITION_URL, file.toURI().toURL().toString());
+    properties.put(VersionDefinitionResourceProvider.VERSION_DEF_DEFINITION_URL, file.toURI().toURL().toString());
     propertySet.add(properties);
 
     final Predicate predicateStackName = new PredicateBuilder().property(RepositoryVersionResourceProvider.REPOSITORY_VERSION_STACK_NAME_PROPERTY_ID).equals("HDP").toPredicate();
@@ -297,9 +298,14 @@ public class RepositoryVersionResourceProviderTest {
     Assert.assertEquals(0, provider.getResources(getRequest, new AndPredicate(predicateStackName, predicateStackVersion)).size());
 
     final Request createRequest = PropertyHelper.getCreateRequest(propertySet, null);
-    provider.createResources(createRequest);
+    RequestStatus status = versionProvider.createResources(createRequest);
+    Assert.assertEquals(1, status.getAssociatedResources().size());
 
-    Set<Resource> results = provider.getResources(getRequest, new AndPredicate(predicateStackName, predicateStackVersion));
+    getRequest = PropertyHelper.getReadRequest("VersionDefinition");
+    Set<Resource> results = versionProvider.getResources(getRequest, null);
+    Assert.assertEquals(1, results.size());
+
+    results = provider.getResources(getRequest, new AndPredicate(predicateStackName, predicateStackVersion));
     Assert.assertEquals(1, results.size());
 
     getRequest = PropertyHelper.getReadRequest(
@@ -374,36 +380,37 @@ public class RepositoryVersionResourceProviderTest {
     StackEntity stackEntity = stackDAO.find("HDP", "1.1");
     Assert.assertNotNull(stackEntity);
 
-    final RepositoryVersionResourceProvider provider = (RepositoryVersionResourceProvider) injector.getInstance(ResourceProviderFactory.class).getRepositoryVersionResourceProvider();
-
     final RepositoryVersionEntity entity = new RepositoryVersionEntity();
     entity.setDisplayName("name");
     entity.setStack(stackEntity);
     entity.setVersion("1.1");
     entity.setOperatingSystems("[{\"OperatingSystems/os_type\":\"redhat6\",\"repositories\":[{\"Repositories/repo_id\":\"1\",\"Repositories/repo_name\":\"1\",\"Repositories/base_url\":\"http://example.com/repo1\"}]}]");
 
+    final RepositoryVersionDAO repositoryVersionDAO = injector.getInstance(RepositoryVersionDAO.class);
+    AmbariMetaInfo info = injector.getInstance(AmbariMetaInfo.class);
+
     // test valid usecases
-    provider.validateRepositoryVersion(entity);
+    RepositoryVersionResourceProvider.validateRepositoryVersion(repositoryVersionDAO, info, entity);
     entity.setVersion("1.1-17");
-    provider.validateRepositoryVersion(entity);
+    RepositoryVersionResourceProvider.validateRepositoryVersion(repositoryVersionDAO, info, entity);
     entity.setVersion("1.1.1.1");
-    provider.validateRepositoryVersion(entity);
+    RepositoryVersionResourceProvider.validateRepositoryVersion(repositoryVersionDAO, info, entity);
     entity.setVersion("1.1.343432.2");
-    provider.validateRepositoryVersion(entity);
+    RepositoryVersionResourceProvider.validateRepositoryVersion(repositoryVersionDAO, info, entity);
     entity.setVersion("1.1.343432.2-234234324");
-    provider.validateRepositoryVersion(entity);
+    RepositoryVersionResourceProvider.validateRepositoryVersion(repositoryVersionDAO, info, entity);
 
     // test invalid usecases
     entity.setOperatingSystems(jsonStringRedhat7);
     try {
-      provider.validateRepositoryVersion(entity);
+      RepositoryVersionResourceProvider.validateRepositoryVersion(repositoryVersionDAO, info, entity);
       Assert.fail("Should throw exception");
     } catch (Exception ex) {
     }
 
     entity.setOperatingSystems("");
     try {
-      provider.validateRepositoryVersion(entity);
+      RepositoryVersionResourceProvider.validateRepositoryVersion(repositoryVersionDAO, info, entity);
       Assert.fail("Should throw exception");
     } catch (Exception ex) {
     }
@@ -412,12 +419,12 @@ public class RepositoryVersionResourceProviderTest {
     stackEntity.setStackName("BIGTOP");
     entity.setStack(bigtop);
     try {
-      provider.validateRepositoryVersion(entity);
+      RepositoryVersionResourceProvider.validateRepositoryVersion(repositoryVersionDAO, info, entity);
       Assert.fail("Should throw exception");
     } catch (Exception ex) {
     }
 
-    final RepositoryVersionDAO repositoryVersionDAO = injector.getInstance(RepositoryVersionDAO.class);
+
     entity.setDisplayName("name");
     entity.setStack(stackEntity);
     entity.setVersion("1.1");
@@ -432,7 +439,7 @@ public class RepositoryVersionResourceProviderTest {
     entity2.setOperatingSystems("[{\"OperatingSystems/os_type\":\"redhat6\",\"repositories\":[{\"Repositories/repo_id\":\"1\",\"Repositories/repo_name\":\"1\",\"Repositories/base_url\":\"http://example.com/repo1\"}]}]");
 
     try {
-      provider.validateRepositoryVersion(entity2);
+      RepositoryVersionResourceProvider.validateRepositoryVersion(repositoryVersionDAO, info, entity2);
       Assert.fail("Should throw exception: Base url http://example.com/repo1 is already defined for another repository version");
     } catch (Exception ex) {
     }


[11/50] [abbrv] ambari git commit: AMBARI-14870. Allow post of Version Definition file directly (ncole)

Posted by nc...@apache.org.
AMBARI-14870. Allow post of Version Definition file directly (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/77af8e54
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/77af8e54
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/77af8e54

Branch: refs/heads/trunk
Commit: 77af8e54fe4f793f9affa2a70e8ca52ccc90d0cb
Parents: c84da24
Author: Nate Cole <nc...@hortonworks.com>
Authored: Mon Feb 1 17:18:12 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Tue Feb 2 07:36:29 2016 -0500

----------------------------------------------------------------------
 .../api/services/VersionDefinitionService.java  | 37 ++++++++-
 .../VersionDefinitionResourceProvider.java      | 59 +++++++++++++--
 .../VersionDefinitionResourceProviderTest.java  | 80 ++++++++++++++++++++
 3 files changed, 167 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/77af8e54/ambari-server/src/main/java/org/apache/ambari/server/api/services/VersionDefinitionService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/VersionDefinitionService.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/VersionDefinitionService.java
index 43eb424..e637850 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/VersionDefinitionService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/VersionDefinitionService.java
@@ -20,6 +20,7 @@ package org.apache.ambari.server.api.services;
 
 import java.util.Collections;
 
+import javax.ws.rs.Consumes;
 import javax.ws.rs.GET;
 import javax.ws.rs.POST;
 import javax.ws.rs.Path;
@@ -27,17 +28,22 @@ import javax.ws.rs.PathParam;
 import javax.ws.rs.Produces;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 import javax.ws.rs.core.UriInfo;
 
 import org.apache.ambari.server.api.resources.ResourceInstance;
+import org.apache.ambari.server.controller.internal.VersionDefinitionResourceProvider;
 import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.commons.codec.binary.Base64;
+
+import com.google.gson.JsonObject;
 
 @Path("/version_definitions/")
 public class VersionDefinitionService extends BaseService {
 
   @GET
-  @Produces("text/plain")
+  @Produces(MediaType.TEXT_PLAIN)
   public Response getServices(@Context HttpHeaders headers, @Context UriInfo ui) {
 
     return handleRequest(headers, null, ui, Request.Type.GET,
@@ -46,7 +52,7 @@ public class VersionDefinitionService extends BaseService {
 
   @GET
   @Path("{versionId}")
-  @Produces("text/plain")
+  @Produces(MediaType.TEXT_PLAIN)
   public Response getService(@Context HttpHeaders headers, @Context UriInfo ui,
       @PathParam("versionId") Long versionId) {
 
@@ -55,12 +61,37 @@ public class VersionDefinitionService extends BaseService {
   }
 
   @POST
-  @Produces("text/plain")
+  @Produces(MediaType.TEXT_PLAIN)
   public Response createVersion(String body, @Context HttpHeaders headers, @Context UriInfo ui) {
     return handleRequest(headers, body, ui, Request.Type.POST,
         createResource(null));
   }
 
+  /**
+   * Creates a version by POSTing the XML body directly.  The request processor only
+   * accepts JSON, so the XML is Base64-encoded and wrapped in a JSON structure before
+   * it is handed to the ResourceProvider.
+   * @param body     the XML version definition
+   * @param headers  the headers
+   * @param ui       the URI info
+   * @return the response from the request handler
+   */
+  @POST
+  @Consumes({MediaType.TEXT_XML})
+  @Produces(MediaType.TEXT_PLAIN)
+  public Response createVersionByXml(String body, @Context HttpHeaders headers,
+      @Context UriInfo ui) throws Exception {
+
+    String encoded = Base64.encodeBase64String(body.getBytes("UTF-8"));
+
+    JsonObject obj = new JsonObject();
+    obj.addProperty(VersionDefinitionResourceProvider.VERSION_DEF_BASE64_PROPERTY, encoded);
+
+    JsonObject payload = new JsonObject();
+    payload.add(VersionDefinitionResourceProvider.VERSION_DEF, obj);
+
+    return handleRequest(headers, payload.toString(), ui, Request.Type.POST,
+        createResource(null));
+  }
+
   protected ResourceInstance createResource(Long versionId) {
     return createResource(Resource.Type.VersionDefinition,
         Collections.singletonMap(Resource.Type.VersionDefinition,
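
A hedged client-side sketch of the new XML upload path. The base path /api/v1/version_definitions/, the admin credentials, the X-Requested-By header, and the local file name are assumptions for illustration only and are not defined by this patch.

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.commons.codec.binary.Base64;

public class PostVersionDefinition {
  public static void main(String[] args) throws Exception {
    // Hypothetical local file holding the version definition XML
    byte[] xml = Files.readAllBytes(Paths.get("version_definition.xml"));

    // Assumed endpoint and credentials; adjust for a real cluster
    URL url = new URL("http://localhost:8080/api/v1/version_definitions/");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");
    conn.setDoOutput(true);
    // Content-Type text/xml routes the request to createVersionByXml(), which
    // Base64-wraps the body into the JSON structure the request processor expects.
    conn.setRequestProperty("Content-Type", "text/xml");
    conn.setRequestProperty("X-Requested-By", "ambari");
    conn.setRequestProperty("Authorization",
        "Basic " + Base64.encodeBase64String("admin:admin".getBytes("UTF-8")));

    OutputStream out = conn.getOutputStream();
    try {
      out.write(xml);
    } finally {
      out.close();
    }
    System.out.println("HTTP " + conn.getResponseCode());
  }
}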

http://git-wip-us.apache.org/repos/asf/ambari/blob/77af8e54/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java
index 3533293..ee99a1e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java
@@ -18,6 +18,7 @@
 package org.apache.ambari.server.controller.internal;
 
 import java.io.InputStream;
+import java.io.UnsupportedEncodingException;
 import java.net.URI;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -42,6 +43,7 @@ import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
 import org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
+import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
 import org.apache.ambari.server.orm.dao.StackDAO;
 import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
@@ -53,6 +55,7 @@ import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.repository.VersionDefinitionXml;
 import org.apache.ambari.server.state.stack.upgrade.RepositoryVersionHelper;
+import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
 
@@ -68,14 +71,15 @@ import com.google.inject.Provider;
 public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourceProvider {
 
   public static final String VERSION_DEF                             = "VersionDefinition";
-  protected static final String VERSION_DEF_ID                       = "VersionDefinition/id";
-
+  public static final String VERSION_DEF_BASE64_PROPERTY             = "version_base64";
   public static final String VERSION_DEF_STACK_NAME                  = "VersionDefinition/stack_name";
   public static final String VERSION_DEF_STACK_VERSION               = "VersionDefinition/stack_version";
 
-
+  protected static final String VERSION_DEF_ID                       = "VersionDefinition/id";
   protected static final String VERSION_DEF_TYPE_PROPERTY_ID         = "VersionDefinition/type";
   protected static final String VERSION_DEF_DEFINITION_URL           = "VersionDefinition/version_url";
+
+  protected static final String VERSION_DEF_DEFINITION_BASE64        = PropertyHelper.getPropertyId(VERSION_DEF, VERSION_DEF_BASE64_PROPERTY);
   protected static final String VERSION_DEF_FULL_VERSION             = "VersionDefinition/repository_version";
   protected static final String VERSION_DEF_RELEASE_VERSION          = "VersionDefinition/release/version";
   protected static final String VERSION_DEF_RELEASE_BUILD            = "VersionDefinition/release/build";
@@ -114,6 +118,7 @@ public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourc
       VERSION_DEF_ID,
       VERSION_DEF_TYPE_PROPERTY_ID,
       VERSION_DEF_DEFINITION_URL,
+      VERSION_DEF_DEFINITION_BASE64,
       VERSION_DEF_FULL_VERSION,
       VERSION_DEF_RELEASE_NOTES,
       VERSION_DEF_RELEASE_COMPATIBLE_WITH,
@@ -156,8 +161,13 @@ public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourc
 
     final Map<String, Object> properties = requestProperties.iterator().next();
 
-    if (!properties.containsKey(VERSION_DEF_DEFINITION_URL)) {
-      throw new IllegalArgumentException(String.format("%s is required", VERSION_DEF_DEFINITION_URL));
+    if (!properties.containsKey(VERSION_DEF_DEFINITION_URL) && !properties.containsKey(VERSION_DEF_DEFINITION_BASE64)) {
+      throw new IllegalArgumentException(String.format("Either %s must be specified or the file must be uploaded directly", VERSION_DEF_DEFINITION_URL));
+    }
+
+    if (properties.containsKey(VERSION_DEF_DEFINITION_URL) && properties.containsKey(VERSION_DEF_DEFINITION_BASE64)) {
+      throw new IllegalArgumentException(String.format("Specify either the url with %s or upload the file directly, but not both",
+          VERSION_DEF_DEFINITION_URL));
+    }
 
     RepositoryVersionEntity entity = createResources(new Command<RepositoryVersionEntity>() {
@@ -165,8 +175,14 @@ public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourc
       public RepositoryVersionEntity invoke() throws AmbariException {
 
         String definitionUrl = (String) properties.get(VERSION_DEF_DEFINITION_URL);
+        String definitionBase64 = (String) properties.get(VERSION_DEF_DEFINITION_BASE64);
 
-        XmlHolder holder = loadXml(definitionUrl);
+        XmlHolder holder = null;
+        if (null != definitionUrl) {
+          holder = loadXml(definitionUrl);
+        } else {
+          holder = loadXml(Base64.decodeBase64(definitionBase64));
+        }
 
         RepositoryVersionEntity entity = toRepositoryVersionEntity(holder);
 
@@ -301,6 +317,36 @@ public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourc
     return ResourceType.AMBARI;
   }
 
+  /**
+   * Loads the XML data from a posted Base64 payload.
+   * @param decoded the decoded Base64 data
+   * @return the XmlHolder instance
+   * @throws IllegalArgumentException if the XML cannot be parsed
+   */
+  private XmlHolder loadXml(byte[] decoded) {
+    XmlHolder holder = new XmlHolder();
+
+    try {
+      holder.xmlString = new String(decoded, "UTF-8");
+    } catch (UnsupportedEncodingException e) {
+      holder.xmlString = new String(decoded);
+    }
+
+    try {
+      holder.xml = VersionDefinitionXml.load(holder.xmlString);
+    } catch (Exception e) {
+      throw new IllegalArgumentException(e);
+    }
+
+    return holder;
+  }
+
+  /**
+   * Loads the XML data from a URL.
+   * @param definitionUrl the URL of the version definition XML
+   * @return the XmlHolder instance
+   * @throws AmbariException if the definition cannot be loaded
+   */
   private XmlHolder loadXml(String definitionUrl) throws AmbariException {
     XmlHolder holder = new XmlHolder();
     holder.url = definitionUrl;
@@ -425,4 +471,5 @@ public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourc
     VersionDefinitionXml xml = null;
   }
 
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/77af8e54/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProviderTest.java
index dbac1b4..efdf84e 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProviderTest.java
@@ -18,6 +18,7 @@
 package org.apache.ambari.server.controller.internal;
 
 import java.io.File;
+import java.io.FileInputStream;
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
 import java.util.Map;
@@ -40,6 +41,8 @@ import org.apache.ambari.server.orm.dao.StackDAO;
 import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
 import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.security.TestAuthenticationFactory;
+import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.io.IOUtils;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -84,6 +87,83 @@ public class VersionDefinitionResourceProviderTest {
   }
 
   @Test
+  public void testWithParentFromBase64() throws Exception {
+    Authentication authentication = TestAuthenticationFactory.createAdministrator();
+    SecurityContextHolder.getContext().setAuthentication(authentication);
+
+    File file = new File("src/test/resources/version_definition_resource_provider.xml");
+
+    byte[] bytes = IOUtils.toByteArray(new FileInputStream(file));
+    String base64Str = Base64.encodeBase64String(bytes);
+
+    final ResourceProvider versionProvider = new VersionDefinitionResourceProvider();
+    final ResourceProvider provider = injector.getInstance(ResourceProviderFactory.class)
+        .getRepositoryVersionResourceProvider();
+
+    final Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
+    final Map<String, Object> properties = new LinkedHashMap<String, Object>();
+    properties.put(VersionDefinitionResourceProvider.VERSION_DEF_DEFINITION_BASE64, base64Str);
+    propertySet.add(properties);
+
+
+    final Request createRequest = PropertyHelper.getCreateRequest(propertySet, null);
+    RequestStatus status = versionProvider.createResources(createRequest);
+    Assert.assertEquals(1, status.getAssociatedResources().size());
+
+    Request getRequest = PropertyHelper.getReadRequest("VersionDefinition");
+    Set<Resource> results = versionProvider.getResources(getRequest, null);
+    Assert.assertEquals(1, results.size());
+
+    final Predicate predicateStackName = new PredicateBuilder().property(RepositoryVersionResourceProvider.REPOSITORY_VERSION_STACK_NAME_PROPERTY_ID).equals("HDP").toPredicate();
+    final Predicate predicateStackVersion = new PredicateBuilder().property(RepositoryVersionResourceProvider.REPOSITORY_VERSION_STACK_VERSION_PROPERTY_ID).equals("2.2.0").toPredicate();
+
+    results = provider.getResources(getRequest,
+        new AndPredicate(predicateStackName, predicateStackVersion));
+    Assert.assertEquals(1, results.size());
+
+    getRequest = PropertyHelper.getReadRequest(
+        RepositoryVersionResourceProvider.REPOSITORY_VERSION_DISPLAY_NAME_PROPERTY_ID,
+        RepositoryVersionResourceProvider.REPOSITORY_VERSION_ID_PROPERTY_ID,
+        RepositoryVersionResourceProvider.REPOSITORY_VERSION_REPOSITORY_VERSION_PROPERTY_ID,
+        RepositoryVersionResourceProvider.REPOSITORY_VERSION_STACK_NAME_PROPERTY_ID,
+        RepositoryVersionResourceProvider.REPOSITORY_VERSION_STACK_VERSION_PROPERTY_ID,
+        RepositoryVersionResourceProvider.SUBRESOURCE_OPERATING_SYSTEMS_PROPERTY_ID,
+        RepositoryVersionResourceProvider.SUBRESOURCE_REPOSITORIES_PROPERTY_ID,
+        "RepositoryVersions/release", "RepositoryVersions/services",
+        "RepositoryVersions/has_children", "RepositoryVersions/parent_id");
+
+    results = provider.getResources(getRequest,
+        new AndPredicate(predicateStackName, predicateStackVersion));
+    Assert.assertEquals(2, results.size());
+
+    Resource r = null;
+    for (Resource result : results) {
+      if (result.getPropertyValue("RepositoryVersions/repository_version").equals("2.2.0.8-5678")) {
+        r = result;
+        break;
+      }
+    }
+
+    Assert.assertNotNull(r);
+    Map<String, Map<String, Object>> map = r.getPropertiesMap();
+    Assert.assertTrue(map.containsKey("RepositoryVersions"));
+
+    Map<String, Object> vals = map.get("RepositoryVersions");
+
+    Assert.assertEquals("2.2.0.8-5678", vals.get("repository_version"));
+    Assert.assertNotNull(vals.get("parent_id"));
+    Assert.assertEquals(Boolean.FALSE, vals.get("has_children"));
+
+
+    Assert.assertTrue(map.containsKey("RepositoryVersions/release"));
+    vals = map.get("RepositoryVersions/release");
+    Assert.assertEquals("5678", vals.get("build"));
+    Assert.assertEquals("2.3.4.[1-9]", vals.get("compatible_with"));
+    Assert.assertEquals("http://docs.hortonworks.com/HDPDocuments/HDP2/HDP-2.3.4/",
+        vals.get("notes"));
+  }
+
+  @Test
   public void testWithParent() throws Exception {
     Authentication authentication = TestAuthenticationFactory.createAdministrator();
     SecurityContextHolder.getContext().setAuthentication(authentication);


[43/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/50547c5e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/50547c5e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/50547c5e

Branch: refs/heads/trunk
Commit: 50547c5ed6076a6fdd18830b179cd28411288496
Parents: 55342fc 9310ab7
Author: Nate Cole <nc...@hortonworks.com>
Authored: Tue Feb 23 09:06:59 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Tue Feb 23 09:06:59 2016 -0500

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog222.java       |  19 +++
 .../HAWQ/2.0.0/configuration/hawq-env.xml       |   2 +-
 .../PXF/3.0.0/package/scripts/params.py         |   1 +
 .../PXF/3.0.0/package/scripts/pxf_constants.py  |   3 +
 .../PXF/3.0.0/package/scripts/service_check.py  | 167 ++++++++++---------
 .../stacks/HDP/2.2/services/stack_advisor.py    |   6 +-
 .../stacks/HDP/2.3/services/stack_advisor.py    |   2 +
 .../services/HIVE/configuration/hive-site.xml   |   6 +
 .../stacks/HDPWIN/2.2/services/stack_advisor.py |  35 +++-
 .../server/upgrade/UpgradeCatalog222Test.java   |  80 ++++++---
 .../stacks/2.2/common/test_stack_advisor.py     |   4 +-
 .../stacks/2.3/common/test_stack_advisor.py     | 103 +++++++++++-
 .../alerts/definition_configs_controller.js     |  16 +-
 ambari-web/app/controllers/main/host.js         |   2 -
 .../controllers/main/service/info/summary.js    |   7 +-
 ambari-web/app/data/HDP2.3/site_properties.js   |   7 +
 ambari-web/app/data/host/categories.js          |  11 --
 .../mappers/alert_definition_summary_mapper.js  |  23 +++
 ambari-web/app/models/alerts/alert_config.js    |  13 --
 ambari-web/app/models/host_component.js         |  12 ++
 ambari-web/app/styles/alerts.less               |  29 ++--
 .../alerts/configs/alert_config_parameter.hbs   |  10 +-
 ambari-web/app/templates/main/menu_item.hbs     |   5 -
 .../service/info/summary/master_components.hbs  |   8 +
 ambari-web/app/views/main/menu.js               |   8 +-
 contrib/views/files/src/main/resources/view.xml |   8 +
 .../jobs/ResultsPaginationController.java       |   7 +-
 contrib/views/hive/src/main/resources/view.xml  |   9 +
 contrib/views/pig/src/main/resources/view.xml   |   9 +
 .../ambari/view/tez/utils/ProxyHelper.java      |   7 +-
 .../view/utils/hdfs/ConfigurationBuilder.java   |   6 +
 dev-support/docker/docker/bin/ambaribuild.py    |   4 +-
 32 files changed, 452 insertions(+), 177 deletions(-)
----------------------------------------------------------------------



[41/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ec91f74a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ec91f74a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ec91f74a

Branch: refs/heads/trunk
Commit: ec91f74a45544d67cb8bece50bd0ba44d9eeb749
Parents: 2eea1bf 5366af8
Author: Nate Cole <nc...@hortonworks.com>
Authored: Mon Feb 22 11:48:50 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Mon Feb 22 11:48:50 2016 -0500

----------------------------------------------------------------------
 .../main/resources/common-services/YARN/2.1.0.2.0/alerts.json    | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------



[24/50] [abbrv] ambari git commit: AMBARI-14972 - Add PK to servicecomponentdesiredstate Table To Support FK Relationships (jonathanhurley) (part2)

Posted by nc...@apache.org.
AMBARI-14972 - Add PK to servicecomponentdesiredstate Table To Support FK Relationships (jonathanhurley) (part2)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2a9f49c0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2a9f49c0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2a9f49c0

Branch: refs/heads/trunk
Commit: 2a9f49c0e3a1839076fd271cb7caf09b3fc897f4
Parents: c543ef8
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Feb 10 12:47:31 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Wed Feb 10 12:47:38 2016 -0500

----------------------------------------------------------------------
 .../apache/ambari/funtest/server/tests/DeleteServiceTest.java   | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2a9f49c0/ambari-funtest/src/test/java/org/apache/ambari/funtest/server/tests/DeleteServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-funtest/src/test/java/org/apache/ambari/funtest/server/tests/DeleteServiceTest.java b/ambari-funtest/src/test/java/org/apache/ambari/funtest/server/tests/DeleteServiceTest.java
index 472bdc4..747ae95 100644
--- a/ambari-funtest/src/test/java/org/apache/ambari/funtest/server/tests/DeleteServiceTest.java
+++ b/ambari-funtest/src/test/java/org/apache/ambari/funtest/server/tests/DeleteServiceTest.java
@@ -39,7 +39,6 @@ import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
 import org.apache.ambari.server.orm.entities.HostComponentStateEntity;
 import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntity;
 import org.apache.ambari.server.orm.entities.ServiceDesiredStateEntityPK;
-import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK;
 import org.apache.ambari.server.state.State;
 
 import org.apache.commons.httpclient.HttpStatus;
@@ -54,6 +53,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
 
+
 /**
  * Simple test that starts the local ambari server,
  * tests it's status and shuts down the server.
@@ -181,8 +181,7 @@ public class DeleteServiceTest extends ServerTestBase {
         /**
          * ServiceComponentDesiredStateDAO
          */
-        ServiceComponentDesiredStateEntityPK serviceComponentDesiredStateEntityPK = injector.getInstance(ServiceComponentDesiredStateEntityPK.class);
-        ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntity = serviceComponentDesiredStateDAO.findByPK(serviceComponentDesiredStateEntityPK);
+        ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntity = serviceComponentDesiredStateDAO.findById(0L);
         assertTrue(serviceComponentDesiredStateEntity == null);
 
         /**


[13/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c72dc41d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c72dc41d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c72dc41d

Branch: refs/heads/trunk
Commit: c72dc41db25fe5898fe7dc36d1aa3bdaf4d0fc5d
Parents: 4c5d2bd 424cca6
Author: Nate Cole <nc...@hortonworks.com>
Authored: Wed Feb 3 14:14:07 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Wed Feb 3 14:14:07 2016 -0500

----------------------------------------------------------------------
 .../app/scripts/controllers/NavbarCtrl.js       |   9 +
 .../authentication/AuthenticationMainCtrl.js    | 147 ++++++++++-
 .../loginActivities/LoginMessageMainCtrl.js     |  59 ++++-
 .../ui/admin-web/app/scripts/i18n.config.js     |  82 +++++-
 .../resources/ui/admin-web/app/styles/main.css  |  42 +++-
 .../app/views/authentication/main.html          | 250 ++++++++++++++++++-
 .../ui/admin-web/app/views/leftNavbar.html      |   4 +-
 .../app/views/loginActivities/loginMessage.html |  74 ++++--
 .../ui/admin-web/app/views/users/create.html    |   2 +-
 ambari-agent/conf/unix/install-helper.sh        |  40 ++-
 .../src/main/package/deb/control/postinst       |  12 +-
 .../src/main/package/deb/control/posttrm        |  15 --
 ambari-agent/src/main/package/deb/control/prerm |   8 -
 .../src/main/package/rpm/postinstall.sh         |  19 +-
 ambari-agent/src/main/package/rpm/preremove.sh  |   8 -
 .../ambari_agent/CustomServiceOrchestrator.py   |  11 +-
 .../main/python/ambari_agent/PythonExecutor.py  |  14 +-
 .../ambari_agent/PythonReflectiveExecutor.py    |   5 +-
 ambari-agent/src/packages/tarball/all.xml       |   4 +-
 .../timeline/AbstractTimelineMetricsSink.java   |   4 +-
 .../metrics2/sink/timeline/TimelineMetric.java  |  13 +-
 .../sink/timeline/TimelineMetricMetadata.java   |  15 +-
 .../timeline/cache/TimelineMetricsCache.java    |   9 +-
 .../sink/flume/FlumeTimelineMetricsSink.java    |   7 +-
 .../timeline/HadoopTimelineMetricsSink.java     |   6 +-
 .../timeline/HadoopTimelineMetricsSinkTest.java |   5 +-
 .../kafka/KafkaTimelineMetricsReporter.java     |  16 +-
 .../storm/StormTimelineMetricsReporter.java     |   2 -
 .../timeline/HBaseTimelineMetricStore.java      |  19 +-
 .../metrics/timeline/PhoenixHBaseAccessor.java  | 111 +++-----
 .../metrics/timeline/aggregators/Function.java  |  75 ++++--
 .../aggregators/TimelineMetricReadHelper.java   |  38 +++
 .../TimelineMetricMetadataManager.java          |   5 +-
 .../metrics/timeline/FunctionTest.java          |  10 +-
 .../timeline/HBaseTimelineMetricStoreTest.java  |  31 ++-
 ambari-server/conf/unix/install-helper.sh       |  48 +++-
 ambari-server/src/main/assemblies/server.xml    |   2 +-
 .../upgrades/FinalizeUpgradeAction.java         |  23 +-
 .../src/main/package/deb/control/postinst       |  10 +-
 .../src/main/package/deb/control/posttrm        |  15 --
 .../src/main/package/deb/control/prerm          |  18 +-
 .../src/main/package/rpm/postinstall.sh         |   8 +-
 ambari-server/src/main/package/rpm/preremove.sh |  18 +-
 .../configuration/application-properties.xml    |  36 ++-
 .../ATLAS/0.1.0.2.3/metainfo.xml                |   4 +
 .../ATLAS/0.1.0.2.3/package/scripts/params.py   |  11 +
 .../common-services/HAWQ/2.0.0/alerts.json      |  19 ++
 .../2.0.0/package/alerts/alert_sync_status.py   |  91 +++++++
 .../HAWQ/2.0.0/package/scripts/service_check.py |  22 +-
 .../HAWQ/2.0.0/package/scripts/utils.py         |   4 +-
 .../HIVE/0.12.0.2.0/configuration/hive-env.xml  |   2 +
 .../0.12.0.2.0/package/scripts/params_linux.py  |   2 +
 .../PXF/3.0.0/package/scripts/params.py         |   6 +-
 .../PXF/3.0.0/package/scripts/pxf_constants.py  |   1 +
 .../PXF/3.0.0/package/scripts/service_check.py  | 101 +++++---
 .../services/HIVE/configuration/hive-env.xml    |   2 +
 .../upgrades/UpgradeActionTest.java             | 160 ++++++++++--
 .../stacks/2.3/HAWQ/test_alert_sync_status.py   | 194 ++++++++++++++
 .../test/python/stacks/2.3/configs/default.json |   9 +-
 ambari-web/app/assets/data/clusters/info.json   |  11 -
 ambari-web/app/assets/data/settings/motd.json   |  10 +
 .../main/admin/stack_and_upgrade_controller.js  |   2 +
 .../alerts/definition_configs_controller.js     |  29 +++
 .../app/mappers/alert_definitions_mapper.js     |  36 +--
 ambari-web/app/messages.js                      |   1 +
 ambari-web/app/models/alerts/alert_config.js    |  62 ++++-
 .../app/models/alerts/alert_definition.js       |   4 +-
 ambari-web/app/models/upgrade_entity.js         |   2 +-
 ambari-web/app/router.js                        |  41 +++
 ambari-web/app/styles/alerts.less               |   4 +
 .../alerts/configs/alert_config_parameter.hbs   |  33 +++
 ambari-web/app/utils/ajax/ajax.js               |   4 +
 ambari-web/app/views/common/table_view.js       |  16 +-
 .../main/alerts/definition_configs_view.js      |  10 +
 .../admin/stack_and_upgrade_controller_test.js  |  17 ++
 .../definitions_configs_controller_test.js      |  44 +++-
 .../mappers/alert_definitions_mapper_test.js    |  45 +++-
 .../test/models/alerts/alert_config_test.js     | 100 ++++++++
 78 files changed, 2001 insertions(+), 447 deletions(-)
----------------------------------------------------------------------



[18/50] [abbrv] ambari git commit: AMBARI-14924. Create patch upgrade orchestration based on available services (ncole)

Posted by nc...@apache.org.
AMBARI-14924. Create patch upgrade orchestration based on available services (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a2561302
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a2561302
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a2561302

Branch: refs/heads/trunk
Commit: a2561302530c859f27c678920014db4e4c4a5f7d
Parents: 0f9da42
Author: Nate Cole <nc...@hortonworks.com>
Authored: Thu Feb 4 13:57:18 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Fri Feb 5 08:50:13 2016 -0500

----------------------------------------------------------------------
 .../checks/HostsRepositoryVersionCheck.java     |  3 +-
 .../internal/UpgradeResourceProvider.java       | 35 +++++++++++-
 .../VersionDefinitionResourceProvider.java      |  6 +--
 .../orm/entities/RepositoryVersionEntity.java   |  2 +-
 .../ambari/server/state/UpgradeContext.java     | 28 ++++++++++
 .../ambari/server/state/UpgradeHelper.java      |  4 ++
 .../checks/HostsRepositoryVersionCheckTest.java | 57 ++++++++++++++++++++
 .../ambari/server/state/UpgradeHelperTest.java  | 57 +++++++++++++++++++-
 8 files changed, 184 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a2561302/ambari-server/src/main/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheck.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheck.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheck.java
index 30250ef..620c651 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheck.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheck.java
@@ -53,6 +53,7 @@ public class HostsRepositoryVersionCheck extends AbstractCheckDescriptor {
     super(CheckDescription.HOSTS_REPOSITORY_VERSION);
   }
 
+  @Override
   public boolean isApplicable(PrereqCheckRequest request) throws AmbariException {
     return super.isApplicable(request) && request.getRepositoryVersion() != null;
   }
@@ -77,7 +78,7 @@ public class HostsRepositoryVersionCheck extends AbstractCheckDescriptor {
         for (HostVersionEntity hve : hostVersionDaoProvider.get().findByHost(host.getHostName())) {
 
           if (hve.getRepositoryVersion().getVersion().equals(request.getRepositoryVersion())
-              && hve.getState() == RepositoryVersionState.INSTALLED) {
+              && (hve.getState() == RepositoryVersionState.INSTALLED || hve.getState() == RepositoryVersionState.NOT_REQUIRED)) {
             found = true;
             break;
           }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2561302/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
index 70440fc..db8c079 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
@@ -24,6 +24,7 @@ import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.VERSION;
 import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.EnumSet;
 import java.util.HashMap;
@@ -36,8 +37,6 @@ import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import com.google.inject.assistedinject.Assisted;
-import com.google.inject.persist.Transactional;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.Role;
 import org.apache.ambari.server.RoleCommand;
@@ -86,12 +85,15 @@ import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.DesiredConfig;
+import org.apache.ambari.server.state.RepositoryType;
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.UpgradeContext;
 import org.apache.ambari.server.state.UpgradeHelper;
 import org.apache.ambari.server.state.UpgradeHelper.UpgradeGroupHolder;
+import org.apache.ambari.server.state.repository.AvailableService;
+import org.apache.ambari.server.state.repository.VersionDefinitionXml;
 import org.apache.ambari.server.state.stack.ConfigUpgradePack;
 import org.apache.ambari.server.state.stack.PrereqCheckStatus;
 import org.apache.ambari.server.state.stack.UpgradePack;
@@ -115,6 +117,8 @@ import com.google.common.collect.Lists;
 import com.google.gson.Gson;
 import com.google.inject.Inject;
 import com.google.inject.Provider;
+import com.google.inject.assistedinject.Assisted;
+import com.google.inject.persist.Transactional;
 
 /**
  * Manages the ability to start and get status of upgrades.
@@ -701,12 +705,37 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
     StackId sourceStackId = null;
     StackId targetStackId = null;
 
+    Set<String> supportedServices = new HashSet<>();
+
     switch (direction) {
       case UPGRADE:
         sourceStackId = cluster.getCurrentStackVersion();
 
         RepositoryVersionEntity targetRepositoryVersion = s_repoVersionDAO.findByStackNameAndVersion(
             sourceStackId.getStackName(), version);
+
+        EnumSet<RepositoryType> serviceAware = EnumSet.of(RepositoryType.PATCH, RepositoryType.SERVICE);
+        if (serviceAware.contains(targetRepositoryVersion.getType())) {
+
+          VersionDefinitionXml xml = null;
+          StackInfo stackInfo = s_metaProvider.get().getStack(sourceStackId.getStackName(),
+              sourceStackId.getStackVersion());
+
+          try {
+            xml = targetRepositoryVersion.getRepositoryXml();
+          } catch (Exception e) {
+            throw new AmbariException(String.format("Could not load repository definition for version %s", version));
+          }
+
+          if (null != xml) {
+            Collection<AvailableService> services = xml.getAvailableServices(stackInfo);
+
+            for (AvailableService available : services) {
+              supportedServices.add(available.getName());
+            }
+          }
+        }
+
         targetStackId = targetRepositoryVersion.getStackId();
         break;
       case DOWNGRADE:
@@ -717,6 +746,8 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
 
     UpgradeContext ctx = new UpgradeContext(resolver, sourceStackId, targetStackId, version,
         direction, pack.getType());
+    ctx.setSupportedServices(supportedServices);
+
 
     if (direction.isDowngrade()) {
       if (requestMap.containsKey(UPGRADE_FROM_VERSION)) {
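
The service discovery step above can be read in isolation as the following minimal sketch (not part of the patch); it assumes an already-loaded RepositoryVersionEntity and the stack's StackInfo, mirroring the calls in the hunk.

import java.util.Collection;
import java.util.HashSet;
import java.util.Set;

import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
import org.apache.ambari.server.state.StackInfo;
import org.apache.ambari.server.state.repository.AvailableService;
import org.apache.ambari.server.state.repository.VersionDefinitionXml;

public class SupportedServiceResolver {
  // Collects the service names a PATCH/SERVICE repository actually carries.
  public static Set<String> resolve(RepositoryVersionEntity repo, StackInfo stack) throws Exception {
    Set<String> supported = new HashSet<>();
    VersionDefinitionXml xml = repo.getRepositoryXml();   // null when no definition was attached
    if (xml != null) {
      Collection<AvailableService> services = xml.getAvailableServices(stack);
      for (AvailableService available : services) {
        supported.add(available.getName());
      }
    }
    return supported;
  }
}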

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2561302/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java
index ee99a1e..3ab5169 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java
@@ -262,7 +262,7 @@ public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourc
 
     List<RepositoryVersionEntity> entities = s_repoVersionDAO.findByStack(entity.getStackId());
     if (entities.isEmpty()) {
-      throw new AmbariException(String.format("Patch %s was uploaded, but there are no repositories for %s",
+      throw new IllegalArgumentException(String.format("Patch %s was uploaded, but there are no repositories for %s",
           entity.getVersion(), entity.getStackId().toString()));
     }
 
@@ -290,7 +290,7 @@ public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourc
     if (matching.isEmpty()) {
       String format = "No versions matched pattern %s";
 
-      throw new AmbariException(String.format(format,
+      throw new IllegalArgumentException(String.format(format,
           emptyCompatible ? holder.xml.release.version : holder.xml.release.compatibleWith));
     } else if (matching.size() > 1) {
       Set<String> versions= new HashSet<>();
@@ -298,7 +298,7 @@ public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourc
         versions.add(match.getVersion());
       }
 
-      throw new AmbariException(String.format("More than one repository matches patch %s: %s",
+      throw new IllegalArgumentException(String.format("More than one repository matches patch %s: %s",
           entity.getVersion(), StringUtils.join(versions, ", ")));
     }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2561302/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
index 4af4216..3398709 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
@@ -349,7 +349,7 @@ public class RepositoryVersionEntity {
   /**
    * Parse the version XML into its object representation.  This causes the XML to be lazy-loaded
    * from storage.
-   * @return {@code null} if the XSD is not available.
+   * @return {@code null} if the XSD (from the XML) is not available.
    * @throws Exception
    */
   public VersionDefinitionXml getRepositoryXml() throws Exception {

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2561302/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
index bd87a55..05aecac 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
@@ -19,8 +19,10 @@ package org.apache.ambari.server.state;
 
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.stack.MasterHostResolver;
@@ -84,6 +86,8 @@ public class UpgradeContext {
    */
   private boolean m_autoSkipManualVerification = false;
 
+  private Set<String> m_supported = new HashSet<>();
+
   /**
    * Constructor.
    *
@@ -350,4 +354,28 @@ public class UpgradeContext {
   public void setAutoSkipManualVerification(boolean autoSkipManualVerification) {
     m_autoSkipManualVerification = autoSkipManualVerification;
   }
+
+  /**
+   * Sets the service names that are supported by an upgrade.  This is used for
+   * {@link RepositoryType#PATCH} and {@link RepositoryType#SERVICE}.
+   *
+   * @param services  the set of specific services
+   */
+  public void setSupportedServices(Set<String> services) {
+    m_supported = services;
+  }
+
+  /**
+   * Gets whether a service is supported.  If no services are marked for the context,
+   * then ALL services are supported.
+   * @param serviceName the service name to check
+   * @return {@code true} when the service is supported
+   */
+  public boolean isServiceSupported(String serviceName) {
+    if (m_supported.isEmpty() || m_supported.contains(serviceName)) {
+      return true;
+    }
+
+    return false;
+  }
 }
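
To spell out the documented semantics, a short illustrative snippet; the constructor arguments mirror the usage in UpgradeHelperTest further down and are placeholders here, and the import path for Direction is assumed.

import java.util.Collections;

import org.apache.ambari.server.stack.MasterHostResolver;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.UpgradeContext;
import org.apache.ambari.server.state.stack.upgrade.Direction;
import org.apache.ambari.server.state.stack.upgrade.UpgradeType;

public class SupportedServicesExample {
  public static void demo(MasterHostResolver resolver, StackId source, StackId target, String version) {
    UpgradeContext ctx = new UpgradeContext(resolver, source, target, version,
        Direction.UPGRADE, UpgradeType.ROLLING);

    // No services registered yet, so every service is considered supported.
    boolean before = ctx.isServiceSupported("HDFS");            // true

    ctx.setSupportedServices(Collections.singleton("ZOOKEEPER"));
    boolean zk = ctx.isServiceSupported("ZOOKEEPER");            // true
    boolean hdfs = ctx.isServiceSupported("HDFS");               // false: not in the supported set
    System.out.println(before + " " + zk + " " + hdfs);
  }
}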

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2561302/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java
index 8213a78..0d9176d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java
@@ -317,6 +317,10 @@ public class UpgradeHelper {
       // !!! cluster and service checks are empty here
       for (UpgradePack.OrderService service : services) {
 
+        if (!context.isServiceSupported(service.serviceName)) {
+          continue;
+        }
+
         if (upgradePack.getType() == UpgradeType.ROLLING && !allTasks.containsKey(service.serviceName)) {
           continue;
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2561302/ambari-server/src/test/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheckTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheckTest.java b/ambari-server/src/test/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheckTest.java
index 4529554..433eee1 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheckTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/checks/HostsRepositoryVersionCheckTest.java
@@ -207,4 +207,61 @@ public class HostsRepositoryVersionCheckTest {
     Assert.assertEquals(PrereqCheckStatus.PASS, check.getStatus());
   }
 
+  @Test
+  public void testPerformWithVersionNotRequired() throws Exception {
+    final HostsRepositoryVersionCheck hostsRepositoryVersionCheck = new HostsRepositoryVersionCheck();
+    hostsRepositoryVersionCheck.clustersProvider = new Provider<Clusters>() {
+
+      @Override
+      public Clusters get() {
+        return clusters;
+      }
+    };
+    hostsRepositoryVersionCheck.repositoryVersionDaoProvider = new Provider<RepositoryVersionDAO>() {
+      @Override
+      public RepositoryVersionDAO get() {
+        return repositoryVersionDAO;
+      }
+    };
+    hostsRepositoryVersionCheck.hostVersionDaoProvider = new Provider<HostVersionDAO>() {
+      @Override
+      public HostVersionDAO get() {
+        return hostVersionDAO;
+      }
+    };
+
+    final Cluster cluster = Mockito.mock(Cluster.class);
+    Mockito.when(cluster.getClusterId()).thenReturn(1L);
+    Mockito.when(cluster.getDesiredStackVersion()).thenReturn(new StackId());
+    Mockito.when(clusters.getCluster("cluster")).thenReturn(cluster);
+    final Map<String, Host> hosts = new HashMap<String, Host>();
+    final Host host1 = Mockito.mock(Host.class);
+    final Host host2 = Mockito.mock(Host.class);
+    final Host host3 = Mockito.mock(Host.class);
+    Mockito.when(host1.getMaintenanceState(1L)).thenReturn(MaintenanceState.OFF);
+    Mockito.when(host2.getMaintenanceState(1L)).thenReturn(MaintenanceState.OFF);
+    Mockito.when(host3.getMaintenanceState(1L)).thenReturn(MaintenanceState.OFF);
+    hosts.put("host1", host1);
+    hosts.put("host2", host2);
+    hosts.put("host3", host3);
+    Mockito.when(clusters.getHostsForCluster("cluster")).thenReturn(hosts);
+
+    RepositoryVersionEntity rve = new RepositoryVersionEntity();
+    rve.setVersion("1.1.1");
+
+    HostVersionEntity hve = new HostVersionEntity();
+    hve.setRepositoryVersion(rve);
+    hve.setState(RepositoryVersionState.NOT_REQUIRED);
+
+    Mockito.when(
+        hostVersionDAO.findByHost(Mockito.anyString())).thenReturn(
+            Collections.singletonList(hve));
+
+    PrerequisiteCheck check = new PrerequisiteCheck(null, null);
+    PrereqCheckRequest request = new PrereqCheckRequest("cluster");
+    request.setRepositoryVersion("1.1.1");
+    hostsRepositoryVersionCheck.perform(check, request);
+    Assert.assertEquals(PrereqCheckStatus.PASS, check.getStatus());
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2561302/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
index eb5bf62..b15157e 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
@@ -61,10 +61,12 @@ import org.apache.ambari.server.state.stack.upgrade.StageWrapper;
 import org.apache.ambari.server.state.stack.upgrade.Task;
 import org.apache.ambari.server.state.stack.upgrade.TaskWrapper;
 import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
+import org.apache.ambari.server.utils.EventBusSynchronizer;
 import org.easymock.EasyMock;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.springframework.security.core.context.SecurityContextHolder;
 
 import com.google.gson.Gson;
 import com.google.gson.reflect.TypeToken;
@@ -74,7 +76,6 @@ import com.google.inject.Injector;
 import com.google.inject.Module;
 import com.google.inject.persist.PersistService;
 import com.google.inject.util.Modules;
-import org.springframework.security.core.context.SecurityContextHolder;
 
 /**
  * Tests the {@link UpgradeHelper} class
@@ -123,6 +124,8 @@ public class UpgradeHelperTest {
     // create an injector which will inject the mocks
     injector = Guice.createInjector(Modules.override(injectorModule).with(mockModule));
     injector.getInstance(GuiceJpaInitializer.class);
+    EventBusSynchronizer.synchronizeAmbariEventPublisher(injector);
+    EventBusSynchronizer.synchronizeAlertEventPublisher(injector);
 
     helper = injector.getInstance(OrmTestHelper.class);
     ambariMetaInfo = injector.getInstance(AmbariMetaInfo.class);
@@ -257,6 +260,58 @@ public class UpgradeHelperTest {
   }
 
   @Test
+  public void testSupportedServiceUpgradeOrchestration() throws Exception {
+    Map<String, UpgradePack> upgrades = ambariMetaInfo.getUpgradePacks("foo", "bar");
+    assertTrue(upgrades.isEmpty());
+
+    upgrades = ambariMetaInfo.getUpgradePacks("HDP", "2.1.1");
+
+    ServiceInfo si = ambariMetaInfo.getService("HDP", "2.1.1", "ZOOKEEPER");
+    si.setDisplayName("Zk");
+    ComponentInfo ci = si.getComponentByName("ZOOKEEPER_SERVER");
+    ci.setDisplayName("ZooKeeper1 Server2");
+
+    assertTrue(upgrades.containsKey("upgrade_test"));
+    UpgradePack upgrade = upgrades.get("upgrade_test");
+    assertNotNull(upgrade);
+
+    makeCluster();
+
+    UpgradeContext context = new UpgradeContext(m_masterHostResolver, HDP_21,
+        HDP_21, UPGRADE_VERSION, Direction.UPGRADE, UpgradeType.ROLLING);
+    context.setSupportedServices(Collections.singleton("ZOOKEEPER"));
+
+
+    List<UpgradeGroupHolder> groups = m_upgradeHelper.createSequence(upgrade, context);
+
+    assertEquals(3, groups.size());
+
+    assertEquals("PRE_CLUSTER", groups.get(0).name);
+    assertEquals("ZOOKEEPER", groups.get(1).name);
+    assertEquals("POST_CLUSTER", groups.get(2).name);
+
+
+    UpgradeGroupHolder group = groups.get(1);
+    // check that the display name is being used
+    assertTrue(group.items.get(1).getText().contains("ZooKeeper1 Server2"));
+    assertEquals(group.items.get(5).getText(), "Service Check Zk");
+
+    UpgradeGroupHolder postGroup = groups.get(2);
+    assertEquals("POST_CLUSTER", postGroup.name);
+    assertEquals("Finalize Upgrade", postGroup.title);
+    assertEquals(3, postGroup.items.size());
+    assertEquals("Confirm Finalize", postGroup.items.get(0).getText());
+    assertEquals("Execute HDFS Finalize", postGroup.items.get(1).getText());
+    assertEquals("Save Cluster State", postGroup.items.get(2).getText());
+    assertEquals(StageWrapper.Type.SERVER_SIDE_ACTION, postGroup.items.get(2).getType());
+
+    assertEquals(4, groups.get(0).items.size());
+    assertEquals(6, groups.get(1).items.size());
+    assertEquals(3, groups.get(2).items.size());
+  }
+
+
+  @Test
   public void testUpgradeServerActionOrchestration() throws Exception {
     Map<String, UpgradePack> upgrades = ambariMetaInfo.getUpgradePacks("HDP", "2.1.1");
 


[02/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4b36397f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4b36397f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4b36397f

Branch: refs/heads/trunk
Commit: 4b36397ffd14f2afed81f0c8d16ce2d2e709530f
Parents: f40df6b 09d3e8b
Author: Nate Cole <nc...@hortonworks.com>
Authored: Tue Jan 19 13:26:47 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Tue Jan 19 13:26:47 2016 -0500

----------------------------------------------------------------------
 .../python/resource_management/core/shell.py    |  10 +-
 .../libraries/functions/ranger_functions.py     |   8 +-
 .../libraries/functions/ranger_functions_v2.py  |   8 +-
 .../server/controller/KerberosHelper.java       |  41 +++-
 .../server/controller/KerberosHelperImpl.java   | 210 +++++++++++++++++--
 .../BlueprintConfigurationProcessor.java        |   5 -
 .../serveraction/ServerActionExecutor.java      |   8 +
 .../AbstractPrepareKerberosServerAction.java    |  90 +++++---
 .../PrepareDisableKerberosServerAction.java     |   2 +-
 .../PrepareEnableKerberosServerAction.java      |   2 +-
 .../PrepareKerberosIdentitiesServerAction.java  |   3 +-
 .../upgrades/SparkShufflePropertyConfig.java    |  14 +-
 .../ambari/server/state/ConfigHelper.java       |   7 +-
 .../server/state/cluster/ClusterImpl.java       |  11 +-
 .../server/state/stack/upgrade/Grouping.java    | 125 +++++++----
 .../stack/upgrade/StageWrapperBuilder.java      |   2 +-
 .../ambari/server/state/stack/upgrade/Task.java |   8 +
 .../server/state/stack/upgrade/TaskWrapper.java |  13 ++
 .../state/stack/upgrade/TaskWrapperBuilder.java |  10 +
 .../svccomphost/ServiceComponentHostImpl.java   |  65 ++++--
 .../topology/ClusterConfigurationRequest.java   |  17 +-
 .../python/ambari_server/serverConfiguration.py |  15 +-
 .../main/resources/Ambari-DDL-Derby-CREATE.sql  |  12 ++
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |  11 +
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |  11 +
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |  12 ++
 .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql     |  13 ++
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |  10 +
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   |  11 +
 .../0.96.0.2.0/configuration/hbase-env.xml      |   2 +-
 .../HBASE/0.96.0.2.0/package/scripts/hbase.py   |   5 +-
 .../0.96.0.2.0/package/scripts/params_linux.py  |   2 -
 .../STORM/0.9.1.2.1/package/scripts/storm.py    |  11 -
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |   2 +-
 .../HDP/2.1/upgrades/nonrolling-upgrade-2.3.xml |   5 +-
 .../HDP/2.2/upgrades/nonrolling-upgrade-2.2.xml |  29 +--
 .../HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml |  29 +--
 .../HDP/2.2/upgrades/nonrolling-upgrade-2.4.xml |  31 +--
 .../stacks/HDP/2.2/upgrades/upgrade-2.2.xml     |   8 +-
 .../stacks/HDP/2.2/upgrades/upgrade-2.3.xml     |   8 +-
 .../stacks/HDP/2.2/upgrades/upgrade-2.4.xml     |  10 +-
 .../stacks/HDP/2.3/role_command_order.json      |   4 +-
 .../stacks/HDP/2.3/services/stack_advisor.py    |   8 +
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml |  29 +--
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml |  31 +--
 .../stacks/HDP/2.3/upgrades/upgrade-2.3.xml     |   8 +-
 .../stacks/HDP/2.3/upgrades/upgrade-2.4.xml     |  29 ++-
 .../services/YARN/configuration/yarn-site.xml   |  14 +-
 .../stacks/HDP/2.4/services/stack_advisor.py    |  78 +------
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml |  29 +--
 .../stacks/HDP/2.4/upgrades/upgrade-2.4.xml     |   8 +-
 .../StackAdvisorBlueprintProcessorTest.java     |   3 +
 .../server/controller/KerberosHelperTest.java   |  46 +++-
 .../SparkShufflePropertyConfigTest.java         |  43 +---
 .../ambari/server/state/ConfigHelperTest.java   |  52 ++++-
 .../ambari/server/state/UpgradeHelperTest.java  | 168 ++++++++++++---
 .../ClusterConfigurationRequestTest.java        |   3 +-
 .../src/test/python/TestAmbariServer.py         |  38 +++-
 .../stacks/2.0.6/HBASE/test_hbase_client.py     |   4 -
 .../stacks/2.0.6/HBASE/test_hbase_master.py     |   6 -
 .../2.0.6/HBASE/test_hbase_regionserver.py      |   8 -
 .../2.0.6/HBASE/test_phoenix_queryserver.py     |   4 -
 .../stacks/2.3/common/test_stack_advisor.py     |  60 ++++++
 .../HDP/2.0.7/services/OOZIE/metainfo.xml       | 176 ++++++++++++++++
 .../HDP/2.1.1/upgrades/upgrade_bucket_test.xml  |  20 +-
 .../upgrades/upgrade_nonrolling_new_stack.xml   |   6 +-
 .../stacks/HDP/2.1.1/upgrades/upgrade_test.xml  |  45 +++-
 ambari-web/app/controllers/main/service/item.js |  25 ++-
 ambari-web/app/messages.js                      |   3 +-
 .../configs/config_recommendation_parser.js     |   9 +-
 ...onfig_with_override_recommendation_parser.js |   2 +-
 .../mixins/common/configs/enhanced_configs.js   |  13 +-
 ambari-web/app/templates/main/host/details.hbs  |   2 +-
 ambari-web/app/views/common/controls_view.js    |   7 +-
 .../app/views/common/quick_view_link_view.js    |   5 +-
 .../test/controllers/main/service/item_test.js  |  12 ++
 .../config_recommendation_parser_test.js        |  27 ++-
 .../test/views/common/quick_link_view_test.js   |  51 +++++
 78 files changed, 1359 insertions(+), 613 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4b36397f/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b36397f/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b36397f/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b36397f/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b36397f/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b36397f/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b36397f/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b36397f/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------


[38/50] [abbrv] ambari git commit: Revert "Merge with trunk"

Posted by nc...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
index 105d695..3cc7516 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
@@ -202,10 +202,8 @@ CREATE TABLE ambari.servicecomponentdesiredstate (
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
-  recovery_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
-  PRIMARY KEY (component_name, cluster_id, service_name)
 );
 GRANT ALL PRIVILEGES ON TABLE ambari.servicecomponentdesiredstate TO :username;
 
@@ -760,7 +758,6 @@ GRANT ALL PRIVILEGES ON TABLE ambari.setting TO :username;
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON ambari.stage (request_id);
 CREATE INDEX idx_hrc_request_id ON ambari.host_role_command (request_id);
-CREATE INDEX idx_hrc_status_role ON ambari.host_role_command (status, role);
 CREATE INDEX idx_rsc_request_id ON ambari.role_success_criteria (request_id);
 
 --------altering tables by creating unique constraints----------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
index a897454..a5bfdc2 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
@@ -167,10 +167,8 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id NUMERIC(19) NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
-  recovery_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
-  PRIMARY KEY (component_name, cluster_id, service_name)
 );
 
 CREATE TABLE servicedesiredstate (
@@ -675,7 +673,6 @@ CREATE TABLE setting (
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
-CREATE INDEX idx_hrc_status_role ON host_role_command (status, role);
 CREATE INDEX idx_rsc_request_id ON role_success_criteria (request_id);
 
 -- altering tables by creating unique constraints----------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
index a8bda7c..8e5b2f8 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
@@ -187,12 +187,10 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
-  recovery_enabled SMALLINT NOT NULL DEFAULT 0,
   PRIMARY KEY CLUSTERED (id),
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
-  PRIMARY KEY CLUSTERED (component_name, cluster_id, service_name)
-  );
+);
 
 CREATE TABLE servicedesiredstate (
   cluster_id BIGINT NOT NULL,
@@ -784,7 +782,6 @@ CREATE TABLE setting (
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
-CREATE INDEX idx_hrc_status_role ON host_role_command (status, role);
 CREATE INDEX idx_rsc_request_id ON role_success_criteria (request_id);
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/alert-templates.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/alert-templates.xml b/ambari-server/src/main/resources/alert-templates.xml
index 2e8fc71..d7fe320 100644
--- a/ambari-server/src/main/resources/alert-templates.xml
+++ b/ambari-server/src/main/resources/alert-templates.xml
@@ -158,20 +158,6 @@
                   <div class="label-small">
                     $alert.getAlertText()
                   </div>
-                  <div class="label-small">
-                    Cluster: $alert.getAlertDefinition().getCluster().getClusterName()
-                  </div>
-                  #if( $alert.getHostName() )
-                    #if( $ambari.hasUrl() )
-                      <div class="label-small">
-                      Host: <a href=$ambari.getUrl()/#/main/hosts/$alert.getHostName()/summary>$ambari.getUrl()/#/main/hosts/$alert.getHostName()/summary</a>
-                      </div>
-                    #else
-                      <div class="label-small">
-                        Host: $alert.getHostName()
-                      </div>
-                    #end
-                  #end
                 </td>
               </tr>
             #end
@@ -184,10 +170,6 @@
     This notification was sent to $dispatch.getTargetName()
     <br/>
     Apache Ambari $ambari.getServerVersion()
-    #if( $ambari.hasUrl() )
-    <br/>
-    Ambari Server link: <a href=$ambari.getUrl()>$ambari.getUrl()</a>
-    #end
   </div>
 </html>
       ]]>
@@ -211,4 +193,4 @@
 $alert.getAlertText()]]>
     </body>
   </alert-template>  
-</alert-templates>
+</alert-templates>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/configuration/kafka-broker.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/configuration/kafka-broker.xml b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/configuration/kafka-broker.xml
index 857f40e..704d73f 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/configuration/kafka-broker.xml
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/configuration/kafka-broker.xml
@@ -325,6 +325,7 @@
     <description>Timeline port</description>
   </property>
   <property>
+  <property>
     <name>kafka.timeline.metrics.protocol</name>
     <value>{{metric_collector_protocol}}</value>
     <description>Timeline protocol(http or https)</description>

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/properties.json b/ambari-server/src/main/resources/properties.json
index 8ccae05..4052ad2 100644
--- a/ambari-server/src/main/resources/properties.json
+++ b/ambari-server/src/main/resources/properties.json
@@ -62,7 +62,6 @@
         "ServiceComponentInfo/total_count",
         "ServiceComponentInfo/started_count",
         "ServiceComponentInfo/installed_count",
-        "ServiceComponentInfo/recovery_enabled",
         "params/run_smoke_test",
         "_"
     ],

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/scripts/Ambaripreupload.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/scripts/Ambaripreupload.py b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
index fb0c3e1..cc6213e 100644
--- a/ambari-server/src/main/resources/scripts/Ambaripreupload.py
+++ b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
@@ -132,7 +132,6 @@ with Environment() as env:
   TAR_DESTINATION_FOLDER_SUFFIX = "_tar_destination_folder"
   
   class params:
-    hdfs_path_prefix = hdfs_path_prefix
     hdfs_user = "hdfs"
     mapred_user ="mapred"
     hadoop_bin_dir="/usr/hdp/" + hdp_version + "/hadoop/bin"
@@ -237,23 +236,23 @@ with Environment() as env:
     return _copy_files(source_and_dest_pairs, file_owner, group_owner, kinit_if_needed)
   
   def createHdfsResources():
-    params.HdfsResource(format('{hdfs_path_prefix}/atshistory'), user='hdfs', change_permissions_for_parents=True, owner='yarn', group='hadoop', type='directory', action= ['create_on_execute'], mode=0755)
-    params.HdfsResource(format('{hdfs_path_prefix}/user/hcat'), owner='hcat', type='directory', action=['create_on_execute'], mode=0755)
-    params.HdfsResource(format('{hdfs_path_prefix}/hive/warehouse'), owner='hive', type='directory', action=['create_on_execute'], mode=0777)
-    params.HdfsResource(format('{hdfs_path_prefix}/user/hive'), owner='hive', type='directory', action=['create_on_execute'], mode=0755)
-    params.HdfsResource(format('{hdfs_path_prefix}/tmp'), mode=0777, action=['create_on_execute'], type='directory', owner='hdfs')
-    params.HdfsResource(format('{hdfs_path_prefix}/user/ambari-qa'), type='directory', action=['create_on_execute'], mode=0770)
-    params.HdfsResource(format('{hdfs_path_prefix}/user/oozie'), owner='oozie', type='directory', action=['create_on_execute'], mode=0775)
-    params.HdfsResource(format('{hdfs_path_prefix}/app-logs'), recursive_chmod=True, owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=0777)
-    params.HdfsResource(format('{hdfs_path_prefix}/tmp/entity-file-history/active'), owner='yarn', group='hadoop', type='directory', action=['create_on_execute'])
-    params.HdfsResource(format('{hdfs_path_prefix}/mapred'), owner='mapred', type='directory', action=['create_on_execute'])
-    params.HdfsResource(format('{hdfs_path_prefix}/mapred/system'), owner='hdfs', type='directory', action=['create_on_execute'])
-    params.HdfsResource(format('{hdfs_path_prefix}/mr-history/done'), change_permissions_for_parents=True, owner='mapred', group='hadoop', type='directory', action=['create_on_execute'], mode=0777)
-    params.HdfsResource(format('{hdfs_path_prefix}/atshistory/done'), owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=0700)
-    params.HdfsResource(format('{hdfs_path_prefix}/atshistory/active'), owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=01777)
-    params.HdfsResource(format('{hdfs_path_prefix}/ams/hbase'), owner='ams', type='directory', action=['create_on_execute'], mode=0775)
-    params.HdfsResource(format('{hdfs_path_prefix}/amshbase/staging'), owner='ams', type='directory', action=['create_on_execute'], mode=0711)
-    params.HdfsResource(format('{hdfs_path_prefix}/user/ams/hbase'), owner='ams', type='directory', action=['create_on_execute'], mode=0775)
+    params.HdfsResource('/atshistory', user='hdfs', change_permissions_for_parents=True, owner='yarn', group='hadoop', type='directory', action= ['create_on_execute'], mode=0755)
+    params.HdfsResource('/user/hcat', owner='hcat', type='directory', action=['create_on_execute'], mode=0755)
+    params.HdfsResource('/hive/warehouse', owner='hive', type='directory', action=['create_on_execute'], mode=0777)
+    params.HdfsResource('/user/hive', owner='hive', type='directory', action=['create_on_execute'], mode=0755)
+    params.HdfsResource('/tmp', mode=0777, action=['create_on_execute'], type='directory', owner='hdfs')
+    params.HdfsResource('/user/ambari-qa', type='directory', action=['create_on_execute'], mode=0770)
+    params.HdfsResource('/user/oozie', owner='oozie', type='directory', action=['create_on_execute'], mode=0775)
+    params.HdfsResource('/app-logs', recursive_chmod=True, owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=0777)
+    params.HdfsResource('/tmp/entity-file-history/active', owner='yarn', group='hadoop', type='directory', action=['create_on_execute'])
+    params.HdfsResource('/mapred', owner='mapred', type='directory', action=['create_on_execute'])
+    params.HdfsResource('/mapred/system', owner='hdfs', type='directory', action=['create_on_execute'])
+    params.HdfsResource('/mr-history/done', change_permissions_for_parents=True, owner='mapred', group='hadoop', type='directory', action=['create_on_execute'], mode=0777)
+    params.HdfsResource('/atshistory/done', owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=0700)
+    params.HdfsResource('/atshistory/active', owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=01777)
+    params.HdfsResource('/ams/hbase', owner='ams', type='directory', action=['create_on_execute'], mode=0775)
+    params.HdfsResource('/amshbase/staging', owner='ams', type='directory', action=['create_on_execute'], mode=0711)
+    params.HdfsResource('/user/ams/hbase', owner='ams', type='directory', action=['create_on_execute'], mode=0775)
 
 
   def putCreatedHdfsResourcesToIgnore(env):
@@ -263,16 +262,14 @@ with Environment() as env:
     
     file_content = ""
     for file in env.config['hdfs_files']:
-      if not file['target'].startswith(hdfs_path_prefix):
-        raise Exception("Something created outside hdfs_path_prefix!")
-      file_content += file['target'][len(hdfs_path_prefix):]
+      file_content += file['target']
       file_content += "\n"
       
     with open("/var/lib/ambari-agent/data/.hdfs_resource_ignore", "a+") as fp:
       fp.write(file_content)
       
   def putSQLDriverToOozieShared():
-    params.HdfsResource(hdfs_path_prefix + '/user/oozie/share/lib/sqoop/{0}'.format(os.path.basename(SQL_DRIVER_PATH)),
+    params.HdfsResource('/user/oozie/share/lib/sqoop/{0}'.format(os.path.basename(SQL_DRIVER_PATH)),
                         owner='hdfs', type='file', action=['create_on_execute'], mode=0644, source=SQL_DRIVER_PATH)
       
   env.set_params(params)
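
For context, the hdfs_path_prefix handling that this revert strips out of Ambaripreupload.py boils down to a prefix check plus prefix-stripping before writing the .hdfs_resource_ignore file. The following is a standalone, illustrative sketch only (not code from the repository); the prefix and target list are made-up stand-ins for what the real script reads from env.config['hdfs_files'] and its command line:

hdfs_path_prefix = "/tmp/preupload"
created_targets = [
    "/tmp/preupload/user/oozie",
    "/tmp/preupload/mr-history/done",
]

file_content = ""
for target in created_targets:
    if not target.startswith(hdfs_path_prefix):
        raise Exception("Something created outside hdfs_path_prefix!")
    # record the path relative to the prefix, as the pre-revert code did
    file_content += target[len(hdfs_path_prefix):]
    file_content += "\n"

print(file_content)  # /user/oozie and /mr-history/done, one per line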

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index f6f8cde..7c69ac9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -581,8 +581,8 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
         putAmsHbaseSiteProperty("hbase.regionserver.global.memstore.upperLimit", 0.3)
         putAmsHbaseSiteProperty("hbase.regionserver.global.memstore.lowerLimit", 0.25)
         putAmsHbaseSiteProperty("phoenix.query.maxGlobalMemoryPercentage", 20)
-        putAmsHbaseSiteProperty("phoenix.coprocessor.maxMetaDataCacheSize", 81920000)
         putAmsSiteProperty("phoenix.query.maxGlobalMemoryPercentage", 30)
+        putAmsHbaseSiteProperty("phoenix.coprocessor.maxMetaDataCacheSize", 81920000)
       elif total_sinks_count >= 500:
         putAmsHbaseSiteProperty("hbase.regionserver.handler.count", 60)
         putAmsHbaseSiteProperty("hbase.regionserver.hlog.blocksize", 134217728)
@@ -593,9 +593,6 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
         putAmsHbaseSiteProperty("phoenix.coprocessor.maxMetaDataCacheSize", 20480000)
       pass
 
-    metrics_api_handlers = min(50, max(20, int(total_sinks_count / 100)))
-    putAmsSiteProperty("timeline.metrics.service.handler.thread.count", metrics_api_handlers)
-
     # Distributed mode heap size
     if operatingMode == "distributed":
       hbase_heapsize = max(hbase_heapsize, 756)
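
The timeline.metrics.service.handler.thread.count line removed just above is a simple clamp on the sink count. A minimal standalone sketch of the same arithmetic (illustrative only; the sink counts below are made up):

def metrics_api_handlers(total_sinks_count):
    # scale with the number of sinks, but stay within [20, 50]
    return min(50, max(20, int(total_sinks_count / 100)))

for sinks in (0, 2500, 10000):
    print("%d sinks -> %d handlers" % (sinks, metrics_api_handlers(sinks)))
    # prints 20, 25 and 50 respectively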

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
index cd25d77..dc968cc 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
@@ -367,16 +367,13 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
 
     yarn_queues = "default"
     capacitySchedulerProperties = {}
-    if "capacity-scheduler" in services['configurations']:
-      if "capacity-scheduler" in services['configurations']["capacity-scheduler"]["properties"]:
-        properties = str(services['configurations']["capacity-scheduler"]["properties"]["capacity-scheduler"]).split('\n')
-        for property in properties:
-          key,sep,value = property.partition("=")
-          capacitySchedulerProperties[key] = value
-      if "yarn.scheduler.capacity.root.queues" in capacitySchedulerProperties:
-        yarn_queues = str(capacitySchedulerProperties["yarn.scheduler.capacity.root.queues"])
-      elif "yarn.scheduler.capacity.root.queues" in services['configurations']["capacity-scheduler"]["properties"]:
-        yarn_queues =  services['configurations']["capacity-scheduler"]["properties"]["yarn.scheduler.capacity.root.queues"]
+    if "capacity-scheduler" in services['configurations'] and "capacity-scheduler" in services['configurations']["capacity-scheduler"]["properties"]:
+      properties = str(services['configurations']["capacity-scheduler"]["properties"]["capacity-scheduler"]).split('\n')
+      for property in properties:
+        key,sep,value = property.partition("=")
+        capacitySchedulerProperties[key] = value
+    if "yarn.scheduler.capacity.root.queues" in capacitySchedulerProperties:
+      yarn_queues = str(capacitySchedulerProperties["yarn.scheduler.capacity.root.queues"])
     # Interactive Queues property attributes
     putHiveServerPropertyAttribute = self.putPropertyAttribute(configurations, "hiveserver2-site")
     toProcessQueues = yarn_queues.split(",")
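
Both versions of the capacity-scheduler handling above parse the same blob of newline-separated key=value pairs before reading yarn.scheduler.capacity.root.queues. A self-contained sketch of that parsing (illustrative only; the blob below is invented, not taken from a real cluster):

blob = ("yarn.scheduler.capacity.root.queues=default,llap\n"
        "yarn.scheduler.capacity.maximum-applications=10000")

capacity_scheduler_properties = {}
for line in blob.split("\n"):
    # str.partition keeps any '=' inside the value intact
    key, sep, value = line.partition("=")
    capacity_scheduler_properties[key] = value

yarn_queues = capacity_scheduler_properties.get(
    "yarn.scheduler.capacity.root.queues", "default")
print(yarn_queues.split(","))  # ['default', 'llap']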

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
index 34e4cfa..b354378 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
@@ -943,31 +943,31 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
                                 "HAWQ Master or Standby Master cannot use the port 5432 when installed on the same host as the Ambari Server. Ambari Postgres DB uses the same port. Please choose a different value (e.g. 10432)")})
 
     # 2. Check if any data directories are pointing to root dir '/'
-    directories = {
-                    'hawq_master_directory': 'HAWQ Master directory',
-                    'hawq_master_temp_directory': 'HAWQ Master temp directory',
-                    'hawq_segment_directory': 'HAWQ Segment directory',
-                    'hawq_segment_temp_directory': 'HAWQ Segment temp directory'
-                  }
-    for property_name, display_name in directories.iteritems():
-      self.validateIfRootDir(properties, validationItems, property_name, display_name)
+    prop_name = 'hawq_master_directory'
+    display_name = 'HAWQ Master directory'
+    self.validateIfRootDir (properties, validationItems, prop_name, display_name)
+
+    prop_name = 'hawq_master_temp_directory'
+    display_name = 'HAWQ Master temp directory'
+    self.validateIfRootDir (properties, validationItems, prop_name, display_name)
+
+    prop_name = 'hawq_segment_directory'
+    display_name = 'HAWQ Segment directory'
+    self.validateIfRootDir (properties, validationItems, prop_name, display_name)
+
+    prop_name = 'hawq_segment_temp_directory'
+    display_name = 'HAWQ Segment temp directory'
+    self.validateIfRootDir (properties, validationItems, prop_name, display_name)
 
     # 3. Check YARN RM address properties
-    YARN = "YARN"
     servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
-    if YARN in servicesList and "yarn-site" in configurations:
+    if "YARN" in servicesList and "yarn-site" in configurations:
       yarn_site = getSiteProperties(configurations, "yarn-site")
       for hs_prop, ys_prop in self.getHAWQYARNPropertyMapping().items():
         if hs_prop in hawq_site and ys_prop in yarn_site and hawq_site[hs_prop] != yarn_site[ys_prop]:
           message = "Expected value: {0} (this property should have the same value as the property {1} in yarn-site)".format(yarn_site[ys_prop], ys_prop)
           validationItems.append({"config-name": hs_prop, "item": self.getWarnItem(message)})
 
-    # 4. Check HAWQ Resource Manager type
-    HAWQ_GLOBAL_RM_TYPE = "hawq_global_rm_type"
-    if YARN not in servicesList and HAWQ_GLOBAL_RM_TYPE in hawq_site and hawq_site[HAWQ_GLOBAL_RM_TYPE].upper() == YARN:
-      message = "{0} must be set to none if YARN service is not installed".format(HAWQ_GLOBAL_RM_TYPE)
-      validationItems.append({"config-name": HAWQ_GLOBAL_RM_TYPE, "item": self.getErrorItem(message)})
-
     return self.toConfigurationValidationProblems(validationItems, "hawq-site")
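
The pre-revert form shown in the removed lines collapses the four repeated HAWQ directory checks into one dictionary-driven loop. A standalone sketch of that pattern (illustrative only; validateIfRootDir is replaced here by a print stub, and dict.items() is used so the sketch runs on both Python 2 and 3, whereas the stack advisor itself uses iteritems()):

directories = {
    'hawq_master_directory': 'HAWQ Master directory',
    'hawq_master_temp_directory': 'HAWQ Master temp directory',
    'hawq_segment_directory': 'HAWQ Segment directory',
    'hawq_segment_temp_directory': 'HAWQ Segment temp directory',
}

def validate_if_root_dir(prop_name, display_name):
    # stand-in for self.validateIfRootDir(properties, validationItems, ...)
    print("would check %s (%s)" % (prop_name, display_name))

for property_name, display_name in directories.items():
    validate_if_root_dir(property_name, display_name)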
   
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/stacks/HDP/2.4/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HIVE/metainfo.xml
index 93728fd..ee973ed 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HIVE/metainfo.xml
@@ -26,7 +26,7 @@
             <name>HIVE_SERVER_INTERACTIVE</name>
             <displayName>HiveServer2 Interactive</displayName>
             <category>MASTER</category>
-            <cardinality>0+</cardinality>
+            <cardinality>1</cardinality>
             <versionAdvertised>true</versionAdvertised>
             <clientsToUpdateConfigs></clientsToUpdateConfigs>
             <dependencies>
@@ -35,7 +35,7 @@
                 <scope>cluster</scope>
                 <auto-deploy>
                   <enabled>true</enabled>
-                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
+                  <co-locate>HIVE/HIVE_SERVER</co-locate>
                 </auto-deploy>
               </dependency>
               <dependency>
@@ -43,15 +43,6 @@
                 <scope>host</scope>
                 <auto-deploy>
                   <enabled>true</enabled>
-                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
-                </auto-deploy>
-              </dependency>
-              <dependency>
-                <name>HDFS/HDFS_CLIENT</name>
-                <scope>host</scope>
-                <auto-deploy>
-                  <enabled>true</enabled>
-                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
                 </auto-deploy>
               </dependency>
               <dependency>
@@ -59,7 +50,6 @@
                 <scope>host</scope>
                 <auto-deploy>
                   <enabled>true</enabled>
-                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
                 </auto-deploy>
               </dependency>
               <dependency>
@@ -67,26 +57,9 @@
                 <scope>host</scope>
                 <auto-deploy>
                   <enabled>true</enabled>
-                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
-                </auto-deploy>
-              </dependency>
-              <dependency>
-                <name>PIG/PIG</name>
-                <scope>host</scope>
-                <auto-deploy>
-                  <enabled>true</enabled>
-                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
-                </auto-deploy>
-              </dependency>
-              <dependency>
-                <name>SLIDER/SLIDER</name>
-                <scope>host</scope>
-                <auto-deploy>
-                  <enabled>true</enabled>
-                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
                 </auto-deploy>
               </dependency>
-            </dependencies>
+              </dependencies>
                 <commandScript>
                   <script>scripts/hive_server_interactive.py</script>
                   <scriptType>PYTHON</scriptType>
@@ -97,14 +70,6 @@
                 </configuration-dependencies>
           </component>
         </components>
-      <requiredServices>
-        <service>ZOOKEEPER</service>
-        <service>HDFS</service>
-        <service>YARN</service>
-        <service>TEZ</service>
-        <service>PIG</service>
-        <service>SLIDER</service>
-      </requiredServices>
     </service>
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
index af6fb9b..bc4d397 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
@@ -607,8 +607,8 @@ public class TestActionScheduler {
     doAnswer(new Answer<List<HostRoleCommand>>() {
       @Override
       public List<HostRoleCommand> answer(InvocationOnMock invocation) throws Throwable {
-        String role = (String) invocation.getArguments()[0];
-        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[1];
+        String role = (String) invocation.getArguments()[1];
+        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[2];
 
         HostRoleCommand task = s.getHostRoleCommand(null, role);
 
@@ -618,7 +618,7 @@ public class TestActionScheduler {
           return Collections.emptyList();
         }
       }
-    }).when(db).getTasksByRoleAndStatus(anyString(), any(HostRoleStatus.class));
+    }).when(db).getTasksByHostRoleAndStatus(anyString(), anyString(), any(HostRoleStatus.class));
 
     ServerActionExecutor.init(injector);
     ActionScheduler scheduler = new ActionScheduler(100, 50, db, aq, fsm, 3,
@@ -762,8 +762,8 @@ public class TestActionScheduler {
     doAnswer(new Answer<List<HostRoleCommand>>() {
       @Override
       public List<HostRoleCommand> answer(InvocationOnMock invocation) throws Throwable {
-        String role = (String) invocation.getArguments()[0];
-        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[1];
+        String role = (String) invocation.getArguments()[1];
+        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[2];
 
         HostRoleCommand task = s.getHostRoleCommand(null, role);
 
@@ -774,7 +774,7 @@ public class TestActionScheduler {
         }
 
       }
-    }).when(db).getTasksByRoleAndStatus(anyString(), any(HostRoleStatus.class));
+    }).when(db).getTasksByHostRoleAndStatus(anyString(), anyString(), any(HostRoleStatus.class));
 
     ServerActionExecutor.init(injector);
     ActionScheduler scheduler = new ActionScheduler(100, 50, db, aq, fsm, 3,
@@ -843,8 +843,8 @@ public class TestActionScheduler {
     doAnswer(new Answer<List<HostRoleCommand>>() {
       @Override
       public List<HostRoleCommand> answer(InvocationOnMock invocation) throws Throwable {
-        String role = (String) invocation.getArguments()[0];
-        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[1];
+        String role = (String) invocation.getArguments()[1];
+        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[2];
 
         HostRoleCommand task = s.getHostRoleCommand(null, role);
 
@@ -854,7 +854,7 @@ public class TestActionScheduler {
           return Collections.emptyList();
         }
       }
-    }).when(db).getTasksByRoleAndStatus(anyString(), any(HostRoleStatus.class));
+    }).when(db).getTasksByHostRoleAndStatus(anyString(), anyString(), any(HostRoleStatus.class));
 
     ActionScheduler scheduler = new ActionScheduler(100, 50, db, aq, fsm, 3,
         new HostsMap((String) null), unitOfWork, null, conf);
@@ -1951,8 +1951,8 @@ public class TestActionScheduler {
     doAnswer(new Answer<List<HostRoleCommand>>() {
       @Override
       public List<HostRoleCommand> answer(InvocationOnMock invocation) throws Throwable {
-        String role = (String) invocation.getArguments()[0];
-        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[1];
+        String role = (String) invocation.getArguments()[1];
+        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[2];
 
         HostRoleCommand task = s.getHostRoleCommand(null, role);
 
@@ -1962,7 +1962,7 @@ public class TestActionScheduler {
           return Collections.emptyList();
         }
       }
-    }).when(db).getTasksByRoleAndStatus(anyString(), any(HostRoleStatus.class));
+    }).when(db).getTasksByHostRoleAndStatus(anyString(), anyString(), any(HostRoleStatus.class));
 
     doAnswer(new Answer<HostRoleCommand>() {
       @Override

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
index 6cb9e6f..510e1fb 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
@@ -36,7 +36,6 @@ import org.apache.ambari.server.agent.rest.AgentResource;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
 import org.apache.ambari.server.orm.DBAccessor;
-import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.security.SecurityHelperImpl;
 import org.apache.ambari.server.stack.StackManagerFactory;
@@ -309,7 +308,6 @@ public class AgentResourceTest extends RandomPortJerseyTest {
       bind(HeartBeatHandler.class).toInstance(handler);
       bind(AmbariMetaInfo.class).toInstance(ambariMetaInfo);
       bind(DBAccessor.class).toInstance(mock(DBAccessor.class));
-      bind(HostRoleCommandDAO.class).toInstance(mock(HostRoleCommandDAO.class));
     }
 
     private void installDependencies() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java b/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java
index 3ecb5aa..4e236f3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java
@@ -563,99 +563,4 @@ public class ConfigurationTest {
     Assert.assertEquals(44, configuration.getPropertyProvidersThreadPoolMaxSize());
   }
 
-
-  public void testGetHostRoleCommandStatusSummaryCacheSize() throws  Exception {
-    // Given
-    final Properties ambariProperties = new Properties();
-    final Configuration configuration = new Configuration(ambariProperties);
-    ambariProperties.setProperty(Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE, "3000");
-
-    // When
-    long actualCacheSize = configuration.getHostRoleCommandStatusSummaryCacheSize();
-
-    // Then
-    Assert.assertEquals(actualCacheSize, 3000L);
-  }
-
-  @Test
-  public void testGetHostRoleCommandStatusSummaryCacheSizeDefault() throws  Exception {
-    // Given
-    final Properties ambariProperties = new Properties();
-    final Configuration configuration = new Configuration(ambariProperties);
-
-    // When
-    long actualCacheSize = configuration.getHostRoleCommandStatusSummaryCacheSize();
-
-    // Then
-    Assert.assertEquals(actualCacheSize, Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE_DEFAULT);
-  }
-
-  @Test
-  public void testGetHostRoleCommandStatusSummaryCacheExpiryDuration() throws  Exception {
-    // Given
-    final Properties ambariProperties = new Properties();
-    final Configuration configuration = new Configuration(ambariProperties);
-    ambariProperties.setProperty(Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION, "60");
-
-    // When
-    long actualCacheExpiryDuration = configuration.getHostRoleCommandStatusSummaryCacheExpiryDuration();
-
-    // Then
-    Assert.assertEquals(actualCacheExpiryDuration, 60L);
-  }
-
-  @Test
-  public void testGetHostRoleCommandStatusSummaryCacheExpiryDurationDefault() throws  Exception {
-    // Given
-    final Properties ambariProperties = new Properties();
-    final Configuration configuration = new Configuration(ambariProperties);
-
-    // When
-    long actualCacheExpiryDuration = configuration.getHostRoleCommandStatusSummaryCacheExpiryDuration();
-
-    // Then
-    Assert.assertEquals(actualCacheExpiryDuration, Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_DEFAULT);
-  }
-
-  @Test
-  public void testGetHostRoleCommandStatusSummaryCacheEnabled() throws  Exception {
-    // Given
-    final Properties ambariProperties = new Properties();
-    final Configuration configuration = new Configuration(ambariProperties);
-    ambariProperties.setProperty(Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED, "true");
-
-    // When
-    boolean actualCacheEnabledConfig = configuration.getHostRoleCommandStatusSummaryCacheEnabled();
-
-    // Then
-    Assert.assertEquals(actualCacheEnabledConfig, true);
-  }
-
-  @Test
-  public void testGetHostRoleCommandStatusSummaryCacheDisabled() throws  Exception {
-    // Given
-    final Properties ambariProperties = new Properties();
-    final Configuration configuration = new Configuration(ambariProperties);
-    ambariProperties.setProperty(Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED, "false");
-
-    // When
-    boolean actualCacheEnabledConfig = configuration.getHostRoleCommandStatusSummaryCacheEnabled();
-
-    // Then
-    Assert.assertEquals(actualCacheEnabledConfig, false);
-  }
-
-  @Test
-  public void testGetHostRoleCommandStatusSummaryCacheEnabledDefault() throws  Exception {
-    // Given
-    final Properties ambariProperties = new Properties();
-    final Configuration configuration = new Configuration(ambariProperties);
-
-    // When
-    boolean actualCacheEnabledConfig = configuration.getHostRoleCommandStatusSummaryCacheEnabled();
-
-    // Then
-    Assert.assertEquals(actualCacheEnabledConfig, Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED_DEFAULT);
-  }
-
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 992150c..7b26f23 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -5679,7 +5679,6 @@ public class AmbariManagementControllerTest {
       clusters.getCluster(clusterName).getService(serviceName)
       .getServiceComponents().values()) {
       Assert.assertEquals(State.INSTALLED, sc.getDesiredState());
-      Assert.assertFalse(sc.isRecoveryEnabled()); // default value of recoveryEnabled
       for (ServiceComponentHost sch : sc.getServiceComponentHosts().values()) {
         Assert.assertEquals(State.INSTALLED, sch.getDesiredState());
         Assert.assertEquals(State.INIT, sch.getState());
@@ -5696,7 +5695,6 @@ public class AmbariManagementControllerTest {
     for (ServiceComponent sc :
       clusters.getCluster(clusterName).getService(serviceName)
           .getServiceComponents().values()) {
-      sc.setRecoveryEnabled(true);
       for (ServiceComponentHost sch : sc.getServiceComponentHosts().values()) {
         sch.setState(State.INSTALLED);
       }
@@ -5716,7 +5714,6 @@ public class AmbariManagementControllerTest {
       clusters.getCluster(clusterName).getService(serviceName)
           .getServiceComponents().values()) {
       Assert.assertEquals(State.INSTALLED, sc.getDesiredState());
-      Assert.assertTrue(sc.isRecoveryEnabled());
       for (ServiceComponentHost sch : sc.getServiceComponentHosts().values()) {
         Assert.assertEquals(State.INSTALLED, sch.getDesiredState());
         Assert.assertEquals(State.INSTALLED, sch.getState());

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
index f6027f3..2dcde00 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
@@ -42,7 +42,6 @@ import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.ResourceProvider;
 import org.apache.ambari.server.metadata.RoleCommandOrder;
 import org.apache.ambari.server.orm.DBAccessor;
-import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.security.credential.PrincipalKeyCredential;
 import org.apache.ambari.server.security.encryption.CredentialStoreService;
@@ -216,7 +215,6 @@ public class KerberosHelperTest extends EasyMockSupport {
         bind(CreatePrincipalsServerAction.class).toInstance(createMock(CreatePrincipalsServerAction.class));
         bind(CreateKeytabFilesServerAction.class).toInstance(createMock(CreateKeytabFilesServerAction.class));
         bind(StackAdvisorHelper.class).toInstance(createMock(StackAdvisorHelper.class));
-        bind(HostRoleCommandDAO.class).toInstance(createNiceMock(HostRoleCommandDAO.class));
       }
     });
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
index d24ca09..f38fab1 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
@@ -225,14 +225,11 @@ public class ComponentResourceProviderTest {
     expect(service.getServiceComponents()).andReturn(serviceComponentMap).anyTimes();
 
     expect(serviceComponent1.convertToResponse()).andReturn(
-      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component100", null, "", 1, 1, 0,
-              true /* recovery enabled */));
+      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component100", null, "", 1, 1, 0));
     expect(serviceComponent2.convertToResponse()).andReturn(
-      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component101", null, "", 1, 1, 0,
-              false /* recovery not enabled */));
+      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component101", null, "", 1, 1, 0));
     expect(serviceComponent3.convertToResponse()).andReturn(
-      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component102", null, "", 1, 1, 0,
-              true /* recovery enabled */));
+      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component102", null, "", 1, 1, 0));
 
     expect(ambariMetaInfo.getComponent((String) anyObject(),
         (String) anyObject(), (String) anyObject(), (String) anyObject()))
@@ -261,7 +258,6 @@ public class ComponentResourceProviderTest {
     propertyIds.add(ComponentResourceProvider.COMPONENT_TOTAL_COUNT_PROPERTY_ID);
     propertyIds.add(ComponentResourceProvider.COMPONENT_STARTED_COUNT_PROPERTY_ID);
     propertyIds.add(ComponentResourceProvider.COMPONENT_INSTALLED_COUNT_PROPERTY_ID);
-    propertyIds.add(ComponentResourceProvider.COMPONENT_RECOVERY_ENABLED_ID);
 
     Predicate predicate = new PredicateBuilder()
       .property(ComponentResourceProvider.COMPONENT_CLUSTER_NAME_PROPERTY_ID)
@@ -286,8 +282,6 @@ public class ComponentResourceProviderTest {
         ComponentResourceProvider.COMPONENT_STARTED_COUNT_PROPERTY_ID));
       Assert.assertEquals(0, resource.getPropertyValue(
         ComponentResourceProvider.COMPONENT_INSTALLED_COUNT_PROPERTY_ID));
-      Assert.assertEquals(String.valueOf(true), resource.getPropertyValue(
-        ComponentResourceProvider.COMPONENT_RECOVERY_ENABLED_ID));
     }
 
     // verify
@@ -370,14 +364,11 @@ public class ComponentResourceProviderTest {
     expect(component3Info.getCategory()).andReturn(null);
 
     expect(serviceComponent1.convertToResponse()).andReturn(
-      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component101", null, "", 1, 0, 1,
-              false /* recovery not enabled */));
+      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component101", null, "", 1, 0, 1));
     expect(serviceComponent2.convertToResponse()).andReturn(
-      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component102", null, "", 1, 0, 1,
-              false /* recovery not enabled */));
+      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component102", null, "", 1, 0, 1));
     expect(serviceComponent3.convertToResponse()).andReturn(
-      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component103", null, "", 1, 0, 1,
-              false /* recovery not enabled */));
+      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component103", null, "", 1, 0, 1));
     expect(serviceComponent1.getDesiredState()).andReturn(State.INSTALLED).anyTimes();
     expect(serviceComponent2.getDesiredState()).andReturn(State.INSTALLED).anyTimes();
     expect(serviceComponent3.getDesiredState()).andReturn(State.INSTALLED).anyTimes();
@@ -421,7 +412,6 @@ public class ComponentResourceProviderTest {
 
     Map<String, Object> properties = new LinkedHashMap<String, Object>();
 
-    properties.put(ComponentResourceProvider.COMPONENT_RECOVERY_ENABLED_ID, String.valueOf(true) /* recovery enabled */);
     properties.put(ComponentResourceProvider.COMPONENT_STATE_PROPERTY_ID, "STARTED");
     properties.put(ComponentResourceProvider.COMPONENT_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
 
@@ -617,7 +607,7 @@ public class ComponentResourceProviderTest {
 
     // requests
     ServiceComponentRequest request1 = new ServiceComponentRequest("cluster1", "service1", "component1",
-        null, String.valueOf(true /* recovery enabled */));
+        null);
 
     Set<ServiceComponentRequest> setRequests = new HashSet<ServiceComponentRequest>();
     setRequests.add(request1);
@@ -677,15 +667,14 @@ public class ComponentResourceProviderTest {
 
     // requests
     ServiceComponentRequest request1 = new ServiceComponentRequest("cluster1", "service1", "component1",
-        null, String.valueOf(true /* recovery enabled */));
+        null);
     ServiceComponentRequest request2 = new ServiceComponentRequest("cluster1", "service1", "component2",
-        null, String.valueOf(true /* recovery enabled */));
+        null);
     ServiceComponentRequest request3 = new ServiceComponentRequest("cluster1", "service1", "component3",
-        null, String.valueOf(true /* recovery enabled */));
+        null);
     ServiceComponentRequest request4 = new ServiceComponentRequest("cluster1", "service1", "component4",
-        null, String.valueOf(true /* recovery enabled */));
-    ServiceComponentRequest request5 = new ServiceComponentRequest("cluster1", "service2", null, null,
-              String.valueOf(true /* recovery enabled */));
+        null);
+    ServiceComponentRequest request5 = new ServiceComponentRequest("cluster1", "service2", null, null);
 
     Set<ServiceComponentRequest> setRequests = new HashSet<ServiceComponentRequest>();
     setRequests.add(request1);
@@ -769,7 +758,7 @@ public class ComponentResourceProviderTest {
 
     // requests
     ServiceComponentRequest request1 = new ServiceComponentRequest("cluster1", "service1", "component1",
-        null, String.valueOf(true /* recovery enabled */));
+        null);
 
     Set<ServiceComponentRequest> setRequests = new HashSet<ServiceComponentRequest>();
     setRequests.add(request1);

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java
index d7a15e2..455652b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java
@@ -31,7 +31,6 @@ import static org.junit.Assert.assertTrue;
 import java.io.File;
 import java.io.FileReader;
 import java.lang.reflect.Type;
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -63,7 +62,6 @@ import org.apache.commons.lang.StringUtils;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
-import org.springframework.util.Assert;
 
 /**
  * StackManager unit tests.
@@ -643,16 +641,12 @@ public class StackManagerTest {
         stack.getKerberosDescriptorFileLocation());
   }
 
+  @Ignore
   @Test
   public void testMetricsLoaded() throws Exception {
 
-    URL rootDirectoryURL = StackManagerTest.class.getResource("/");
-    Assert.notNull(rootDirectoryURL);
-
-    File resourcesDirectory = new File(new File(rootDirectoryURL.getFile()).getParentFile().getParentFile(), "src/main/resources");
-
-    File stackRoot = new File(resourcesDirectory, "stacks");
-    File commonServices = new File(resourcesDirectory, "common-services");
+    String stackRoot = ClassLoader.getSystemClassLoader().getResource("stacks").getPath().replace("test-classes","classes");
+    String commonServices = ClassLoader.getSystemClassLoader().getResource("common-services").getPath().replace("test-classes","classes");
 
     MetainfoDAO metaInfoDao = createNiceMock(MetainfoDAO.class);
     StackDAO stackDao = createNiceMock(StackDAO.class);
@@ -666,7 +660,7 @@ public class StackManagerTest {
 
     OsFamily osFamily = new OsFamily(config);
 
-    StackManager stackManager = new StackManager(stackRoot, commonServices,
+    StackManager stackManager = new StackManager(new File(stackRoot), new File(commonServices),
             osFamily, metaInfoDao, actionMetadata, stackDao);
 
     for (StackInfo stackInfo : stackManager.getStacks()) {
@@ -688,15 +682,12 @@ public class StackManagerTest {
     }
   }
 
+  @Ignore
   @Test
   public void testServicesWithRangerPluginRoleCommandOrder() throws AmbariException {
-    URL rootDirectoryURL = StackManagerTest.class.getResource("/");
-    Assert.notNull(rootDirectoryURL);
-
-    File resourcesDirectory = new File(new File(rootDirectoryURL.getFile()).getParentFile().getParentFile(), "src/main/resources");
-
-    File stackRoot = new File(resourcesDirectory, "stacks");
-    File commonServices = new File(resourcesDirectory, "common-services");
+    // Given
+    String stackRoot = ClassLoader.getSystemClassLoader().getResource("stacks").getPath().replace("test-classes","classes");
+    String commonServices = ClassLoader.getSystemClassLoader().getResource("common-services").getPath().replace("test-classes","classes");
 
     MetainfoDAO metaInfoDao = createNiceMock(MetainfoDAO.class);
     StackDAO stackDao = createNiceMock(StackDAO.class);
@@ -710,7 +701,7 @@ public class StackManagerTest {
 
     OsFamily osFamily = new OsFamily(config);
 
-    StackManager stackManager = new StackManager(stackRoot, commonServices, osFamily, metaInfoDao, actionMetadata, stackDao);
+    StackManager stackManager = new StackManager(new File(stackRoot), new File(commonServices), osFamily, metaInfoDao, actionMetadata, stackDao);
 
     String rangerUserSyncRoleCommand = Role.RANGER_USERSYNC + "-" + RoleCommand.START;
     String rangerAdminRoleCommand = Role.RANGER_ADMIN + "-" + RoleCommand.START;
@@ -792,6 +783,14 @@ public class StackManagerTest {
 
     assertTrue(rangerUserSyncRoleCommand + " should be dependent of " + rangerAdminRoleCommand, rangerUserSyncBlockers.contains(rangerAdminRoleCommand));
     assertTrue(rangerUserSyncRoleCommand + " should be dependent of " + kmsRoleCommand, rangerUserSyncBlockers.contains(kmsRoleCommand));
+
+    // Zookeeper Server
+    ArrayList<String> zookeeperBlockers = (ArrayList<String>)generalDeps.get(zookeeperServerRoleCommand);
+
+    assertTrue(zookeeperServerRoleCommand + " should be dependent of " + rangerUserSyncRoleCommand, zookeeperBlockers.contains(rangerUserSyncRoleCommand));
+
   }
+
+
   //todo: component override assertions
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
index 9fe0fc3..98424b7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
@@ -48,7 +48,6 @@ import org.apache.ambari.server.controller.spi.ClusterController;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
-import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.security.TestAuthenticationFactory;
 import org.apache.ambari.server.stack.StackManagerFactory;
@@ -755,7 +754,6 @@ public class ConfigHelperTest {
           bind(Clusters.class).toInstance(createNiceMock(ClustersImpl.class));
           bind(ClusterController.class).toInstance(clusterController);
           bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));
-          bind(HostRoleCommandDAO.class).toInstance(createNiceMock(HostRoleCommandDAO.class));
         }
       });
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
index 077df33..6061e06 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
@@ -19,12 +19,12 @@
 package org.apache.ambari.server.upgrade;
 
 
-import java.lang.reflect.Method;
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.persistence.EntityManager;
-
+import com.google.common.collect.Maps;
+import com.google.gson.Gson;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Provider;
+import com.google.inject.persist.PersistService;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.controller.AmbariManagementController;
@@ -33,7 +33,6 @@ import org.apache.ambari.server.controller.ConfigurationRequest;
 import org.apache.ambari.server.controller.ConfigurationResponse;
 import org.apache.ambari.server.controller.KerberosHelper;
 import org.apache.ambari.server.controller.MaintenanceStateHelper;
-import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.dao.StackDAO;
@@ -41,7 +40,6 @@ import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.stack.OsFamily;
 import org.easymock.Capture;
 import org.easymock.EasyMock;
 import org.easymock.EasyMockSupport;
@@ -49,14 +47,10 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.google.common.collect.Maps;
-import com.google.gson.Gson;
-import com.google.inject.Binder;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.Module;
-import com.google.inject.Provider;
-import com.google.inject.persist.PersistService;
+import javax.persistence.EntityManager;
+import java.lang.reflect.Method;
+import java.util.HashMap;
+import java.util.Map;
 
 import static org.easymock.EasyMock.anyObject;
 import static org.easymock.EasyMock.anyString;
@@ -64,7 +58,6 @@ import static org.easymock.EasyMock.capture;
 import static org.easymock.EasyMock.createMockBuilder;
 import static org.easymock.EasyMock.createNiceMock;
 import static org.easymock.EasyMock.createStrictMock;
-import static org.easymock.EasyMock.eq;
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.expectLastCall;
 import static org.easymock.EasyMock.replay;
@@ -108,7 +101,6 @@ public class UpgradeCatalog222Test {
     Method updateAlerts = UpgradeCatalog222.class.getDeclaredMethod("updateAlerts");
     Method updateStormConfigs = UpgradeCatalog222.class.getDeclaredMethod("updateStormConfigs");
     Method updateAMSConfigs = UpgradeCatalog222.class.getDeclaredMethod("updateAMSConfigs");
-    Method updateHostRoleCommands = UpgradeCatalog222.class.getDeclaredMethod("updateHostRoleCommands");
 
 
     UpgradeCatalog222 upgradeCatalog222 = createMockBuilder(UpgradeCatalog222.class)
@@ -116,7 +108,6 @@ public class UpgradeCatalog222Test {
             .addMockedMethod(updateAlerts)
             .addMockedMethod(updateStormConfigs)
             .addMockedMethod(updateAMSConfigs)
-            .addMockedMethod(updateHostRoleCommands)
             .createMock();
 
     upgradeCatalog222.addNewConfigurationsFromXml();
@@ -127,8 +118,6 @@ public class UpgradeCatalog222Test {
     expectLastCall().once();
     upgradeCatalog222.updateAMSConfigs();
     expectLastCall().once();
-    upgradeCatalog222.updateHostRoleCommands();
-    expectLastCall().once();
 
     replay(upgradeCatalog222);
 
@@ -214,28 +203,4 @@ public class UpgradeCatalog222Test {
 
   }
 
-  @Test
-  public void testUpdateHostRoleCommands() throws Exception {
-    final DBAccessor dbAccessor = createNiceMock(DBAccessor.class);
-    dbAccessor.createIndex(eq("idx_hrc_status"), eq("host_role_command"), eq("status"), eq("role"));
-    expectLastCall().once();
-
-    replay(dbAccessor);
-
-    Module module = new Module() {
-      @Override
-      public void configure(Binder binder) {
-        binder.bind(DBAccessor.class).toInstance(dbAccessor);
-        binder.bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
-      }
-    };
-
-    Injector injector = Guice.createInjector(module);
-    UpgradeCatalog222 upgradeCatalog222 = injector.getInstance(UpgradeCatalog222.class);
-    upgradeCatalog222.updateHostRoleCommands();
-
-
-    verify(dbAccessor);
-  }
-
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
index f5fafbc..95ae8d8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
@@ -103,7 +103,6 @@ public class UpgradeCatalog240Test {
   @Test
   public void testExecuteDDLUpdates() throws SQLException, AmbariException {
     Capture<DBAccessor.DBColumnInfo> capturedColumnInfo = newCapture();
-    Capture<DBAccessor.DBColumnInfo> capturedScColumnInfo = newCapture();
     final DBAccessor dbAccessor = createStrictMock(DBAccessor.class);
     Configuration configuration = createNiceMock(Configuration.class);
     Connection connection = createNiceMock(Connection.class);
@@ -112,8 +111,6 @@ public class UpgradeCatalog240Test {
     Capture<List<DBAccessor.DBColumnInfo>> capturedSettingColumns = EasyMock.newCapture();
 
     dbAccessor.addColumn(eq("adminpermission"), capture(capturedColumnInfo));
-    dbAccessor.addColumn(eq(UpgradeCatalog240.SERVICE_COMPONENT_DESIRED_STATE_TABLE), capture(capturedScColumnInfo));
-
     dbAccessor.createTable(eq("setting"), capture(capturedSettingColumns), eq("id"));
     expect(configuration.getDatabaseUrl()).andReturn(Configuration.JDBC_IN_MEMORY_URL).anyTimes();
     expect(dbAccessor.getConnection()).andReturn(connection);
@@ -179,15 +176,6 @@ public class UpgradeCatalog240Test {
     Assert.assertEquals(1, columnInfo.getDefaultValue());
     Assert.assertEquals(false, columnInfo.isNullable());
 
-    // Verify if recovery_enabled column was added to servicecomponentdesiredstate table
-    DBAccessor.DBColumnInfo columnScInfo = capturedScColumnInfo.getValue();
-    Assert.assertNotNull(columnScInfo);
-    Assert.assertEquals(UpgradeCatalog240.RECOVERY_ENABLED_COL, columnScInfo.getName());
-    Assert.assertEquals(null, columnScInfo.getLength());
-    Assert.assertEquals(Short.class, columnScInfo.getType());
-    Assert.assertEquals(0, columnScInfo.getDefaultValue());
-    Assert.assertEquals(false, columnScInfo.isNullable());
-
     Map<String, Class> expectedCaptures = new HashMap<>();
     expectedCaptures.put("id", Long.class);
     expectedCaptures.put("name", String.class);

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java b/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
index 215d137..854263c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
@@ -37,7 +37,6 @@ import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.dao.HostDAO;
-import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.security.encryption.CredentialStoreService;
 import org.apache.ambari.server.stack.StackManagerFactory;
@@ -119,7 +118,6 @@ public class StageUtilsTest extends EasyMockSupport {
         bind(HostRoleCommandFactory.class).to(HostRoleCommandFactoryImpl.class);
         bind(HostDAO.class).toInstance(createNiceMock(HostDAO.class));
         bind(PersistedState.class).toInstance(createNiceMock(PersistedState.class));
-        bind(HostRoleCommandDAO.class).toInstance(createNiceMock(HostRoleCommandDAO.class));
       }
     });
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
index e15582e..7c578f2 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
@@ -1820,7 +1820,7 @@ class TestHDP206StackAdvisor(TestCase):
     )
     recommendedDefaults = {"property1": "file:///grid/0/var/dir"}
     warn = self.stackAdvisor.validatorNotRootFs(properties, recommendedDefaults, 'property1', hostInfo)
-    self.assertTrue(warn != None)
+    self.assertIsNotNone(warn)
     self.assertEquals({'message': 'It is not recommended to use root partition for property1', 'level': 'WARN'}, warn)
 
     # Set by user /var mountpoint, which is non-root , but not preferable - no warning
@@ -1831,7 +1831,7 @@ class TestHDP206StackAdvisor(TestCase):
         "mountpoint" : "/var"
       }
     )
-    self.assertTrue(self.stackAdvisor.validatorNotRootFs(properties, recommendedDefaults, 'property1', hostInfo) == None)
+    self.assertIsNone(self.stackAdvisor.validatorNotRootFs(properties, recommendedDefaults, 'property1', hostInfo))
 
   def test_validatorEnoughDiskSpace(self):
     reqiuredDiskSpace = 1048576
@@ -1847,7 +1847,7 @@ class TestHDP206StackAdvisor(TestCase):
       }
     ]}
     properties = {"property1": "file:///var/dir"}
-    self.assertTrue(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace) == None)
+    self.assertIsNone(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace))
 
     # local FS, no enough space
     hostInfo = {"disk_info": [
@@ -1858,16 +1858,16 @@ class TestHDP206StackAdvisor(TestCase):
       }
     ]}
     warn = self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace)
-    self.assertTrue(warn != None)
+    self.assertIsNotNone(warn)
     self.assertEquals({'message': errorMsg, 'level': 'WARN'}, warn)
 
     # non-local FS, HDFS
     properties = {"property1": "hdfs://h1"}
-    self.assertTrue(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace) == None)
+    self.assertIsNone(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace))
 
     # non-local FS, WASB
     properties = {"property1": "wasb://h1"}
-    self.assertTrue(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace) == None)
+    self.assertIsNone(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace))
 
   def test_round_to_n(self):
     self.assertEquals(self.stack_advisor_impl.round_to_n(0), 0)
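
For illustration only: the hunk above replaces truthiness comparisons against None with unittest's dedicated identity assertions, which give clearer failure messages. A minimal hypothetical sketch of that assertion style (not taken from the Ambari test suite, not part of the commit):

    # Hypothetical example of the assertion pattern adopted in the hunk above.
    import unittest

    class IdentityAssertionExample(unittest.TestCase):
        def test_identity_assertions(self):
            warn = None
            self.assertIsNone(warn)        # clearer than assertTrue(warn == None)
            warn = {"message": "example", "level": "WARN"}
            self.assertIsNotNone(warn)     # clearer than assertTrue(warn != None)

    if __name__ == "__main__":
        unittest.main()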

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
index d230030..14a28d3 100644
--- a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
@@ -1028,7 +1028,7 @@ class TestHDP22StackAdvisor(TestCase):
       'hive-site': {
         'properties': {
           'hive.server2.enable.doAs': 'true',
-          'hive.server2.tez.default.queues': "queue1,queue2",
+          'hive.server2.tez.default.queues': "default",
           'hive.server2.tez.initialize.default.sessions': 'false',
           'hive.server2.tez.sessions.per.default.queue': '1',
           'hive.auto.convert.join.noconditionaltask.size': '268435456',
@@ -1073,16 +1073,7 @@ class TestHDP22StackAdvisor(TestCase):
          'hive.server2.authentication.kerberos.keytab': {'delete': 'true'},
          'hive.server2.authentication.ldap.url': {'delete': 'true'},
          'hive.server2.tez.default.queues': {
-           "entries": [
-             {
-               "value": "queue1",
-               "label": "queue1 queue"
-             },
-             {
-               "value": "queue2",
-               "label": "queue2 queue"
-             }
-           ]
+           'entries': [{'value': 'default', 'label': 'default queue'}]
           }
         }
       },
@@ -2061,7 +2052,6 @@ class TestHDP22StackAdvisor(TestCase):
           "timeline.metrics.cluster.aggregate.splitpoints": " ",
           "timeline.metrics.host.aggregate.splitpoints": " ",
           "timeline.metrics.host.aggregator.ttl": "1",
-          "timeline.metrics.service.handler.thread.count": "20",
           'timeline.metrics.service.watcher.disabled': 'false'
         }
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
index 03ae6cc..545a2b5 100644
--- a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
@@ -1630,7 +1630,6 @@ class TestHDP23StackAdvisor(TestCase):
   def test_validateHAWQConfigurations(self):
     services = self.load_json("services-hawq-3-hosts.json")
     # setup default configuration values
-    # Test hawq_rm_yarn_address and hawq_rm_scheduler_address are set correctly
     configurations = services["configurations"]
     configurations["hawq-site"] = {"properties": {"hawq_rm_yarn_address": "localhost:8032",
                                                   "hawq_rm_yarn_scheduler_address": "localhost:8030"}}
@@ -1665,48 +1664,3 @@ class TestHDP23StackAdvisor(TestCase):
     self.assertEqual(len(problems), 2)
     self.assertEqual(problems_dict, expected_warnings)
 
-    # Test hawq_global_rm_type validation
-    services = {
-                 "services" : [
-                   {
-                     "StackServices" : {
-                     "service_name" : "HAWQ"
-                     },
-                     "components": []
-                   } ],
-                 "configurations":
-                   {
-                     "hawq-site": {
-                       "properties": {
-                         "hawq_global_rm_type": "yarn"
-                       }
-                     }
-                   }
-                }
-    properties = services["configurations"]["hawq-site"]["properties"]
-
-    # case 1: hawq_global_rm_type is set as yarn, but YARN service is not installed. Validation error expected.
-    """
-    Validation error expected is as below:
-                    [ {
-                          "config-type": "hawq-site",
-                          "message": "hawq_global_rm_type must be set to none if YARN service is not installed",
-                          "type": "configuration",
-                          "config-name": "hawq_global_rm_type",
-                          "level": "ERROR"
-                    } ]
-    """
-    problems = self.stackAdvisor.validateHAWQConfigurations(properties, defaults, services["configurations"], services, hosts)
-    self.assertEqual(len(problems), 1)
-    self.assertEqual(problems[0]["config-type"], "hawq-site")
-    self.assertEqual(problems[0]["message"], "hawq_global_rm_type must be set to none if YARN service is not installed")
-    self.assertEqual(problems[0]["type"], "configuration")
-    self.assertEqual(problems[0]["config-name"], "hawq_global_rm_type")
-    self.assertEqual(problems[0]["level"], "ERROR")
-
-
-    # case 2: hawq_global_rm_type is set as yarn, and YARN service is installed. No validation errors expected.
-    services["services"].append({"StackServices" : {"service_name" : "YARN"}, "components":[]})
-
-    problems = self.stackAdvisor.validateHAWQConfigurations(properties, defaults, services["configurations"], services, hosts)
-    self.assertEqual(len(problems), 0)
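
For reference, the assertions removed above operate on plain dict entries returned by the stack advisor's validate* methods. A minimal hypothetical sketch of that entry shape (values mirror the removed docstring; this sketch is not part of the commit):

    # Hypothetical validation-problem entry, shaped like the ones the removed
    # test asserted on.
    problem = {
        "config-type": "hawq-site",
        "config-name": "hawq_global_rm_type",
        "type": "configuration",
        "level": "ERROR",
        "message": "hawq_global_rm_type must be set to none if YARN service is not installed",
    }

    assert problem["level"] == "ERROR"
    assert problem["config-name"] == "hawq_global_rm_type"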

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-web/app/assets/test/tests.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/test/tests.js b/ambari-web/app/assets/test/tests.js
index 4edcc5e..06c4c31 100644
--- a/ambari-web/app/assets/test/tests.js
+++ b/ambari-web/app/assets/test/tests.js
@@ -239,7 +239,6 @@ var files = [
   'test/views/main/dashboard/widgets/uptime_text_widget_test',
   'test/views/main/dashboard/widgets/node_managers_live_test',
   'test/views/main/dashboard/widgets/datanode_live_test',
-  'test/views/main/dashboard/widgets/hawqsegment_live_test',
   'test/views/main/dashboard/widgets/hbase_average_load_test',
   'test/views/main/dashboard/widgets/hbase_regions_in_transition_test',
   'test/views/main/dashboard/widgets/namenode_rpc_test',

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-web/app/controllers/main/admin/highAvailability/hawq/addStandby/step3_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/highAvailability/hawq/addStandby/step3_controller.js b/ambari-web/app/controllers/main/admin/highAvailability/hawq/addStandby/step3_controller.js
index fd28ad5..513a519 100644
--- a/ambari-web/app/controllers/main/admin/highAvailability/hawq/addStandby/step3_controller.js
+++ b/ambari-web/app/controllers/main/admin/highAvailability/hawq/addStandby/step3_controller.js
@@ -141,7 +141,7 @@ App.AddHawqStandbyWizardStep3Controller = Em.Controller.extend({
   submit: function () {
     if (!this.get('isSubmitDisabled')) {
       dataDir = this.get('hawqProps').items[0].properties['hawq_master_directory'];
-      hawqStandby = this.get('content.hawqHosts.newHawqStandby');
+      hawqStandby = this.get('hawqProps').items[0].properties['hawq_standby_address_host']
       App.showConfirmationPopup(
         function() {
           App.get('router.mainAdminKerberosController').getKDCSessionState(function() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-web/app/mappers/components_state_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/components_state_mapper.js b/ambari-web/app/mappers/components_state_mapper.js
index ac3e1b5..0f2b627 100644
--- a/ambari-web/app/mappers/components_state_mapper.js
+++ b/ambari-web/app/mappers/components_state_mapper.js
@@ -59,11 +59,6 @@ App.componentsStateMapper = App.QuickDataMapper.create({
       node_managers_installed: 'INSTALLED_PATH',
       node_managers_total: 'TOTAL_PATH'
     },
-    'HAWQSEGMENT': {
-      hawq_segments_started: 'STARTED_PATH',
-      hawq_segments_installed: 'INSTALLED_PATH',
-      hawq_segments_total: 'TOTAL_PATH'
-    },
     'HBASE_REGIONSERVER': {
       region_servers_started: 'STARTED_PATH',
       region_servers_installed: 'INSTALLED_PATH',

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 95f87f8..1246a5c 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -2532,7 +2532,6 @@ Em.I18n.translations = {
   'dashboard.widgets.YARNLinks': 'YARN Links',
   'dashboard.widgets.error.invalid': 'Invalid! Enter a number between 0 - {0}',
   'dashboard.widgets.error.smaller': 'Threshold 1 should be smaller than threshold 2!',
-  'dashboard.widgets.HawqSegmentUp': 'HAWQ Segments Live',
 
   'dashboard': {
     'widgets': {
@@ -2637,10 +2636,6 @@ Em.I18n.translations = {
   'dashboard.services.hbase.masterStarted':'Master Started',
   'dashboard.services.hbase.masterActivated':'Master Activated',
 
-  'dashboard.services.hawq.segments.started':'started',
-  'dashboard.services.hawq.segments.stopped':'stopped',
-  'dashboard.services.hawq.segments.total':'in total',
-
   'dashboard.services.hive.clients':'Hive Clients',
   'dashboard.services.hive.client':'Hive Client',
   'dashboard.services.hive.metastore':'Hive Metastore',

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-web/app/models/alerts/alert_definition.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/alerts/alert_definition.js b/ambari-web/app/models/alerts/alert_definition.js
index 6c25f7e..e91bd4f 100644
--- a/ambari-web/app/models/alerts/alert_definition.js
+++ b/ambari-web/app/models/alerts/alert_definition.js
@@ -41,8 +41,8 @@ App.AlertDefinition = DS.Model.extend({
   groups: DS.hasMany('App.AlertGroup'),
   reporting: DS.hasMany('App.AlertReportDefinition'),
   parameters: DS.hasMany('App.AlertDefinitionParameter'),
-  lastTriggered: 0,
-  lastTriggeredRaw: 0,
+  lastTriggered: DS.attr('number'),
+  lastTriggeredRaw: DS.attr('number'),
 
   //relates only to SCRIPT-type alert definition
   location: DS.attr('string'),

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-web/app/views.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views.js b/ambari-web/app/views.js
index 18b43a8..4b6b857 100644
--- a/ambari-web/app/views.js
+++ b/ambari-web/app/views.js
@@ -220,7 +220,6 @@ require('views/main/dashboard/widgets/namenode_heap');
 require('views/main/dashboard/widgets/namenode_cpu');
 require('views/main/dashboard/widgets/hdfs_capacity');
 require('views/main/dashboard/widgets/datanode_live');
-require('views/main/dashboard/widgets/hawqsegment_live');
 require('views/main/dashboard/widgets/namenode_rpc');
 require('views/main/dashboard/widgets/metrics_memory');
 require('views/main/dashboard/widgets/metrics_network');

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-web/app/views/common/configs/widgets/list_config_widget_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/configs/widgets/list_config_widget_view.js b/ambari-web/app/views/common/configs/widgets/list_config_widget_view.js
index 568c405..fcf2aac 100644
--- a/ambari-web/app/views/common/configs/widgets/list_config_widget_view.js
+++ b/ambari-web/app/views/common/configs/widgets/list_config_widget_view.js
@@ -288,15 +288,16 @@ App.ListConfigWidgetView = App.ConfigWidgetView.extend({
   },
 
   isOptionExist: function(value) {
-    var isExist = true;
-    if (Em.isNone(value)) {
-      return !isExist;
-    } else {
+    var isExist = false;
+    if (value !== null && value !== undefined) {
       value = Em.typeOf(value) == 'string' ? value.split(',') : value;
       value.forEach(function(item) {
-        isExist = isExist && this.get('options').mapProperty('value').contains(item);
+        isExist = this.get('options').mapProperty('value').contains(item);
       }, this);
       return isExist;
+    } else {
+      return false;
     }
   }
+
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-web/app/views/main/dashboard/widgets.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/widgets.js b/ambari-web/app/views/main/dashboard/widgets.js
index 8a86af6..a2fb281 100644
--- a/ambari-web/app/views/main/dashboard/widgets.js
+++ b/ambari-web/app/views/main/dashboard/widgets.js
@@ -128,8 +128,7 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
       '13', '12', '14', '16', //hbase
       '17', '18', '19', '20', '23', // all yarn
       '21', // storm
-      '22', // flume
-      '24' // hawq
+      '22' // flume
     ]; // all in order
     var hiddenFull = [
       ['15', 'Region In Transition']
@@ -174,12 +173,6 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
         visibleFull = visibleFull.without(item);
       }, this);
     }
-    if (this.get('hawq_model') == null) {
-      var hawq = ['24'];
-      hawq.forEach(function (item) {
-        visibleFull = visibleFull.without(item);
-      }, this);
-    }
     var obj = this.get('initPrefObject');
     obj.set('visible', visibleFull);
     obj.set('hidden', hiddenFull);
@@ -199,8 +192,6 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
 
   flume_model: null,
 
-  hawq_model: null,
-
   /**
    * List of visible widgets
    * @type {Ember.Enumerable}
@@ -392,8 +383,7 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
       hbase_model: ['12', '13', '14', '15', '16'],
       yarn_model: ['17', '18', '19', '20', '23'],
       storm_model: ['21'],
-      flume_model: ['22'],
-      hawq_model: ['24']
+      flume_model: ['22']
     };
 
     // check each service, find out the newly added service and already deleted service
@@ -460,8 +450,7 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
       '20': App.YARNMemoryPieChartView,
       '21': App.SuperVisorUpView,
       '22': App.FlumeAgentUpView,
-      '23': App.YARNLinksView,
-      '24': App.HawqSegmentUpView
+      '23': App.YARNLinksView
     }, id);
   },
 
@@ -478,7 +467,7 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
     visible: [],
     hidden: [],
     threshold: {1: [80, 90], 2: [85, 95], 3: [90, 95], 4: [80, 90], 5: [1000, 3000], 6: [], 7: [], 8: [], 9: [], 10: [], 11: [], 12: [], 13: [70, 90], 14: [150, 250], 15: [3, 10], 16: [],
-      17: [70, 90], 18: [], 19: [50, 75], 20: [50, 75], 21: [85, 95], 22: [85, 95], 23: [], 24: [80, 90]} // id:[thresh1, thresh2]
+      17: [70, 90], 18: [], 19: [50, 75], 20: [50, 75], 21: [85, 95], 22: [85, 95], 23: []} // id:[thresh1, thresh2]
   }),
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-web/app/views/main/dashboard/widgets/hawqsegment_live.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/widgets/hawqsegment_live.js b/ambari-web/app/views/main/dashboard/widgets/hawqsegment_live.js
deleted file mode 100644
index e8d0656..0000000
--- a/ambari-web/app/views/main/dashboard/widgets/hawqsegment_live.js
+++ /dev/null
@@ -1,190 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-var App = require('app');
-
-App.HawqSegmentUpView = App.TextDashboardWidgetView.extend({
-
-  title: Em.I18n.t('dashboard.widgets.HawqSegmentUp'),
-  id: '24',
-
-  isPieChart: false,
-  isText: true,
-  isProgressBar: false,
-  model_type: 'hawq',
-
-  hiddenInfo: function () {
-    var result = [];
-    result.pushObject(this.get('hawqSegmentsStarted') + ' ' + Em.I18n.t('dashboard.services.hawq.segments.started'));
-    result.pushObject(this.get('hawqSegmentsInstalled') + ' ' + Em.I18n.t('dashboard.services.hawq.segments.stopped'));
-    result.pushObject(this.get('hawqSegmentsTotal')+ ' ' + Em.I18n.t('dashboard.services.hawq.segments.total'));
-    return result;
-  }.property('hawqSegmentsStarted', 'hawqSegmentsInstalled', 'hawqSegmentsTotal'),
-  hiddenInfoClass: "hidden-info-three-line",
-
-  thresh1: 40,
-  thresh2: 70,
-  maxValue: 100,
-
-  hawqSegmentsStarted: function () {
-    if (Em.isNone(this.get('model.hawqSegmentsStarted'))) {
-      return Em.I18n.t('services.service.summary.notAvailable');
-    }
-    return this.get('model.hawqSegmentsStarted');
-  }.property('model.hawqSegmentsStarted'),
-
-  hawqSegmentsInstalled: function () {
-    if (Em.isNone(this.get('model.hawqSegmentsInstalled'))) {
-      return Em.I18n.t('services.service.summary.notAvailable');
-    }
-    return this.get('model.hawqSegmentsInstalled');
-  }.property('model.hawqSegmentsInstalled'),
-
-  hawqSegmentsTotal: function () {
-    if (Em.isNone(this.get('model.hawqSegmentsTotal'))) {
-      return Em.I18n.t('services.service.summary.notAvailable');
-    }
-    return this.get('model.hawqSegmentsTotal');
-  }.property('model.hawqSegmentsTotal'),
-
-  data: function () {
-    if (Em.isNone(this.get('model.hawqSegmentsStarted')) || Em.isNone(this.get('model.hawqSegmentsTotal'))) {
-      return null;
-    } else {
-      return ((this.get('hawqSegmentsStarted') / this.get('model.hawqSegmentsTotal')).toFixed(2)) * 100;
-    }
-  }.property('model.hawqSegmentsTotal', 'hawqSegmentsStarted'),
-
-  content: function () {
-    if (Em.isNone(this.get('model.hawqSegmentsStarted')) || Em.isNone(this.get('model.hawqSegmentsTotal'))) {
-      return Em.I18n.t('services.service.summary.notAvailable');
-    } else {
-      return this.get('hawqSegmentsStarted') + "/" + this.get('model.hawqSegmentsTotal');
-    }
-  }.property('model.hawqSegmentsTotal', 'hawqSegmentsStarted'),
-
-  editWidget: function (event) {
-    var parent = this;
-    var max_tmp =  parseFloat(parent.get('maxValue'));
-    var configObj = Ember.Object.create({
-      thresh1: parent.get('thresh1') + '',
-      thresh2: parent.get('thresh2') + '',
-      hintInfo: Em.I18n.t('dashboard.widgets.hintInfo.hint1').format(max_tmp),
-      isThresh1Error: false,
-      isThresh2Error: false,
-      errorMessage1: "",
-      errorMessage2: "",
-      maxValue: max_tmp,
-      observeNewThresholdValue: function () {
-        var thresh1 = this.get('thresh1');
-        var thresh2 = this.get('thresh2');
-        if (thresh1.trim() != "") {
-          if (isNaN(thresh1) || thresh1 > max_tmp || thresh1 < 0){
-            this.set('isThresh1Error', true);
-            this.set('errorMessage1', 'Invalid! Enter a number between 0 - ' + max_tmp);
-          } else if ( this.get('isThresh2Error') === false && parseFloat(thresh2)<= parseFloat(thresh1)) {
-            this.set('isThresh1Error', true);
-            this.set('errorMessage1', 'Threshold 1 should be smaller than threshold 2 !');
-          } else {
-            this.set('isThresh1Error', false);
-            this.set('errorMessage1', '');
-          }
-        } else {
-          this.set('isThresh1Error', true);
-          this.set('errorMessage1', 'This is required');
-        }
-
-        if (thresh2.trim() != "") {
-          if (isNaN(thresh2) || thresh2 > max_tmp || thresh2 < 0) {
-            this.set('isThresh2Error', true);
-            this.set('errorMessage2', 'Invalid! Enter a number between 0 - ' + max_tmp);
-          } else {
-            this.set('isThresh2Error', false);
-            this.set('errorMessage2', '');
-          }
-        } else {
-          this.set('isThresh2Error', true);
-          this.set('errorMessage2', 'This is required');
-        }
-
-        // update the slider handles and color
-        if (this.get('isThresh1Error') === false && this.get('isThresh2Error') === false) {
-          $("#slider-range").slider('values', 0 , parseFloat(thresh1));
-          $("#slider-range").slider('values', 1 , parseFloat(thresh2));
-        }
-      }.observes('thresh1', 'thresh2')
-
-    });
-
-    var browserVerion = this.getInternetExplorerVersion();
-    App.ModalPopup.show({
-      header: Em.I18n.t('dashboard.widgets.popupHeader'),
-      classNames: [ 'sixty-percent-width-modal-edit-widget'],
-      bodyClass: Ember.View.extend({
-        templateName: require('templates/main/dashboard/edit_widget_popup'),
-        configPropertyObj: configObj
-      }),
-      primary: Em.I18n.t('common.apply'),
-      onPrimary: function () {
-        configObj.observeNewThresholdValue();
-        if (!configObj.isThresh1Error && !configObj.isThresh2Error) {
-          parent.set('thresh1', parseFloat(configObj.get('thresh1')) );
-          parent.set('thresh2', parseFloat(configObj.get('thresh2')) );
-          if (!App.get('testMode')) {
-            var big_parent = parent.get('parentView');
-            big_parent.getUserPref(big_parent.get('persistKey'));
-            var oldValue = big_parent.get('currentPrefObject');
-            oldValue.threshold[parseInt(parent.id)] = [configObj.get('thresh1'), configObj.get('thresh2')];
-            big_parent.postUserPref(big_parent.get('persistKey'),oldValue);
-          }
-          this.hide();
-        }
-      },
-
-      didInsertElement: function () {
-        var handlers = [configObj.get('thresh1'), configObj.get('thresh2')];
-        var colors = [App.healthStatusRed, App.healthStatusOrange, App.healthStatusGreen]; //color red, orange, green
-
-        if (browserVerion == -1 || browserVerion > 9) {
-          configObj.set('isIE9', false);
-          configObj.set('isGreenOrangeRed', false);
-          $("#slider-range").slider({
-            range: true,
-            min: 0,
-            max: max_tmp,
-            values: handlers,
-            create: function (event, ui) {
-              parent.updateColors(handlers, colors);
-            },
-            slide: function (event, ui) {
-              parent.updateColors(ui.values, colors);
-              configObj.set('thresh1', ui.values[0] + '');
-              configObj.set('thresh2', ui.values[1] + '');
-            },
-            change: function (event, ui) {
-              parent.updateColors(ui.values, colors);
-            }
-          });
-        } else {
-          configObj.set('isIE9', true);
-          configObj.set('isGreenOrangeRed', false);
-        }
-      }
-    });
-  }
-});


[20/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c11a4113
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c11a4113
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c11a4113

Branch: refs/heads/trunk
Commit: c11a41138b02994e54070177efc99241bebfade0
Parents: 0f2c337 f57f9b2
Author: Nate Cole <nc...@hortonworks.com>
Authored: Fri Feb 5 16:55:44 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Fri Feb 5 16:55:44 2016 -0500

----------------------------------------------------------------------
 .../loginActivities/LoginActivitiesMainCtrl.js  |   4 +-
 .../loginActivities/LoginMessageMainCtrl.js     |   5 +-
 .../stackVersions/StackVersionsCreateCtrl.js    |   2 +-
 .../stackVersions/StackVersionsEditCtrl.js      |  10 +
 .../ui/admin-web/app/scripts/i18n.config.js     |   6 +-
 .../ui/admin-web/app/scripts/routes.js          |   9 +-
 .../admin-web/app/scripts/services/Cluster.js   |  15 +-
 .../ui/admin-web/app/scripts/services/Stack.js  |   2 +-
 .../ui/admin-web/app/views/leftNavbar.html      |   2 +-
 .../app/views/loginActivities/loginMessage.html |  25 +-
 .../app/views/loginActivities/main.html         |   4 +-
 .../src/main/resources/ui/admin-web/gulpfile.js |   1 -
 ambari-agent/pom.xml                            | 118 +++++--
 .../src/main/package/dependencies.properties    |  32 ++
 .../libraries/functions/conf_select.py          |   2 +-
 .../src/main/repo/install_ambari_tarball.py     | 250 +++++++++++++
 ambari-metrics/ambari-metrics-assembly/pom.xml  | 348 +++++++++++++------
 .../src/main/assembly/grafana.xml               |  59 ++++
 ambari-metrics/ambari-metrics-grafana/README.md | 243 +++++++++++++
 .../ambari-metrics/datasource.js                | 304 ++++++++++++++++
 .../ambari-metrics/directives.js                |  36 ++
 .../ambari-metrics/partials/config.html         |  19 +
 .../ambari-metrics/partials/query.editor.html   | 133 +++++++
 .../ambari-metrics/partials/query.options.html  |  42 +++
 .../ambari-metrics/plugin.json                  |  14 +
 .../ambari-metrics/queryCtrl.js                 | 131 +++++++
 .../conf/unix/ambari-metrics-grafana            | 181 ++++++++++
 .../conf/unix/ams-grafana-env.sh                |  29 ++
 .../conf/unix/ams-grafana.ini                   | 255 ++++++++++++++
 ambari-metrics/ambari-metrics-grafana/pom.xml   | 125 +++++++
 .../screenshots/1-add-datasource.png            | Bin 0 -> 108602 bytes
 .../screenshots/10-choose-hostname.png          | Bin 0 -> 298654 bytes
 .../11-choose-agg-rate-precision.png            | Bin 0 -> 276486 bytes
 .../screenshots/12-change-panel-title.png       | Bin 0 -> 258642 bytes
 .../screenshots/13-save-dashboard.png           | Bin 0 -> 262005 bytes
 .../screenshots/14-change-timerange.png         | Bin 0 -> 310766 bytes
 .../screenshots/15-change-units.png             | Bin 0 -> 322069 bytes
 .../screenshots/16-display-style-graph-1.png    | Bin 0 -> 285467 bytes
 .../screenshots/17-series-specific-override.png | Bin 0 -> 302825 bytes
 .../screenshots/18-override-time.png            | Bin 0 -> 255655 bytes
 .../screenshots/19-edit-graph.png               | Bin 0 -> 191904 bytes
 .../screenshots/2-datasource-details.png        | Bin 0 -> 125313 bytes
 .../screenshots/3-test-datasource.png           | Bin 0 -> 136121 bytes
 .../screenshots/4-dashboard-dropdown.png        | Bin 0 -> 126964 bytes
 .../screenshots/5-dashboard-graph-menu.png      | Bin 0 -> 146851 bytes
 .../screenshots/6-graph-panels.png              | Bin 0 -> 105383 bytes
 .../screenshots/7-choose-datasource.png         | Bin 0 -> 246860 bytes
 .../screenshots/8-choose-component.png          | Bin 0 -> 199123 bytes
 .../screenshots/9-choose-metric.png             | Bin 0 -> 216473 bytes
 .../screenshots/add-dashboard.png               | Bin 0 -> 107965 bytes
 .../screenshots/full-dashboard.png              | Bin 0 -> 161956 bytes
 .../src/main/assemblies/empty.xml               |  21 ++
 .../metrics/timeline/PhoenixHBaseAccessor.java  |  53 +--
 .../webapp/TimelineWebServices.java             |   7 +-
 .../timeline/AbstractMiniHBaseClusterTest.java  |   1 +
 .../timeline/PhoenixHBaseAccessorTest.java      |   2 +-
 ambari-metrics/pom.xml                          |   5 +
 ambari-server/conf/unix/install-helper.sh       |  52 +--
 ambari-server/pom.xml                           | 148 +++++---
 ambari-server/src/main/assemblies/server.xml    |   2 +-
 .../ambari/server/agent/HeartbeatMonitor.java   |  45 ++-
 .../resources/ClusterResourceDefinition.java    |   3 +-
 .../ambari/server/checks/CheckDescription.java  |   8 +
 .../HardcodedStackVersionPropertiesCheck.java   | 132 +++++++
 .../server/configuration/Configuration.java     |   8 +
 .../AmbariManagementControllerImpl.java         |  44 ++-
 .../ambari/server/controller/AmbariServer.java  |  22 +-
 .../server/controller/KerberosHelper.java       |  26 +-
 .../server/controller/KerberosHelperImpl.java   |   5 +
 .../internal/AlertTargetResourceProvider.java   |  12 +-
 .../BlueprintConfigurationProcessor.java        | 206 +++++++----
 .../serveraction/upgrades/FixLzoCodecPath.java  | 103 ++++++
 .../upgrades/OozieConfigCalculation.java        |  94 +++++
 .../topology/ClusterConfigurationRequest.java   |  63 +++-
 .../ambari/server/topology/Configuration.java   |  15 +
 .../server/upgrade/UpgradeCatalog222.java       |  46 +++
 .../src/main/package/deb/control/postinst       |   6 +-
 .../src/main/package/deb/control/preinst        |  23 +-
 .../src/main/package/deb/control/prerm          |   6 +-
 .../src/main/package/dependencies.properties    |  33 ++
 .../0.1.0/configuration/ams-grafana-env.xml     |  59 ++++
 .../0.1.0/configuration/ams-grafana-ini.xml     | 282 +++++++++++++++
 .../0.1.0/configuration/ams-site.xml            |   4 +-
 .../AMBARI_METRICS/0.1.0/metainfo.xml           |  18 +
 .../AMBARI_METRICS/0.1.0/package/scripts/ams.py |  30 ++
 .../0.1.0/package/scripts/metrics_collector.py  |   2 +-
 .../0.1.0/package/scripts/metrics_grafana.py    |  65 ++++
 .../0.1.0/package/scripts/metrics_monitor.py    |   2 +-
 .../0.1.0/package/scripts/params.py             |   9 +
 .../0.1.0/package/scripts/status.py             |   4 +
 .../0.1.0/package/scripts/status_params.py      |   1 +
 .../OOZIE/4.0.0.2.0/configuration/oozie-env.xml |   3 +
 .../OOZIE/4.2.0.2.3/configuration/oozie-env.xml |   3 +
 .../stacks/HDP/2.0.6/role_command_order.json    |   8 +-
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |  32 +-
 .../HDP/2.1/upgrades/nonrolling-upgrade-2.3.xml |  12 +
 .../services/OOZIE/configuration/oozie-env.xml  |   3 +
 .../services/RANGER/themes/theme_version_1.json |  20 +-
 .../HDP/2.2/upgrades/nonrolling-upgrade-2.2.xml |  12 +
 .../HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml |  13 +-
 .../HDP/2.2/upgrades/nonrolling-upgrade-2.4.xml |  12 +
 .../stacks/HDP/2.2/upgrades/upgrade-2.2.xml     |   8 +
 .../stacks/HDP/2.2/upgrades/upgrade-2.3.xml     |   7 +
 .../stacks/HDP/2.2/upgrades/upgrade-2.4.xml     |   8 +
 .../services/RANGER/themes/theme_version_2.json |  20 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml |  15 +
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml |  18 +-
 .../stacks/HDP/2.3/upgrades/upgrade-2.3.xml     |   8 +
 .../stacks/HDP/2.3/upgrades/upgrade-2.4.xml     |   8 +
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml |  15 +
 .../stacks/HDP/2.4/upgrades/upgrade-2.4.xml     |   8 +
 ...ardcodedStackVersionPropertiesCheckTest.java |  60 ++++
 .../AmbariManagementControllerImplTest.java     |  12 +
 .../server/controller/AmbariServerTest.java     |   9 +
 .../server/controller/KerberosHelperTest.java   |  66 +++-
 .../AlertTargetResourceProviderTest.java        | 150 +++++++-
 .../BlueprintConfigurationProcessorTest.java    | 243 ++++++++++++-
 .../upgrades/FixLzoCodecPathTest.java           |  76 ++++
 .../upgrades/OozieConfigCalculationTest.java    |  87 +++++
 .../ClusterConfigurationRequestTest.java        | 172 ++++++++-
 .../ClusterInstallWithoutStartTest.java         |   2 +
 .../server/topology/ConfigurationTest.java      |  14 +
 .../server/topology/TopologyManagerTest.java    |   2 +
 .../server/upgrade/UpgradeCatalog222Test.java   |  81 +++++
 .../AMBARI_METRICS/test_metrics_grafana.py      |  75 ++++
 .../stacks/2.0.6/common/test_stack_advisor.py   |  54 ++-
 .../python/stacks/2.0.6/configs/default.json    |   9 +
 ambari-web/app/assets/licenses/NOTICE.txt       |   5 +-
 ambari-web/app/assets/test/tests.js             |   2 +
 ambari-web/app/config.js                        |   4 +-
 ambari-web/app/controllers.js                   |   4 +
 .../app/controllers/global/update_controller.js |   4 +-
 .../hawq/activateStandby/step1_controller.js    |  23 ++
 .../hawq/activateStandby/step2_controller.js    | 105 ++++++
 .../hawq/activateStandby/step3_controller.js    | 126 +++++++
 .../hawq/activateStandby/wizard_controller.js   | 128 +++++++
 .../nameNode/rollback_controller.js             |  34 +-
 .../nameNode/step3_controller.js                |   3 +
 .../nameNode/step9_controller.js                |  23 +-
 .../main/admin/highAvailability_controller.js   |   9 +
 .../main/admin/service_auto_start.js            |  21 +-
 .../alerts/definition_configs_controller.js     |  13 +-
 .../manage_alert_notifications_controller.js    |   7 +
 ambari-web/app/controllers/main/host/details.js |  39 ++-
 ambari-web/app/controllers/main/service/item.js | 112 ++++--
 .../app/controllers/wizard/step7_controller.js  |   2 +-
 .../HDP2.3/hawq_activate_standby_properties.js  |  43 +++
 ambari-web/app/data/HDP2.3/site_properties.js   |   7 +
 ambari-web/app/data/HDP2/ha_properties.js       |  68 +++-
 ambari-web/app/data/controller_route.js         |   4 +
 ambari-web/app/messages.js                      |  60 +++-
 ambari-web/app/mixins.js                        |   1 +
 .../app/mixins/common/infinite_scroll_mixin.js  | 173 +++++++++
 .../wizard/wizardProgressPageController.js      |  30 ++
 ambari-web/app/models/alerts/alert_config.js    |   1 +
 ambari-web/app/models/host_component.js         |   9 +-
 ambari-web/app/models/stack_service.js          |   2 +-
 ambari-web/app/router.js                        |   2 +-
 .../app/routes/activate_hawq_standby_routes.js  | 179 ++++++++++
 ambari-web/app/routes/main.js                   |  13 +
 ambari-web/app/styles/alerts.less               |   2 +-
 ambari-web/app/styles/application.less          |   7 +-
 ambari-web/app/styles/log_file_search.less      | 155 +++++++++
 ambari-web/app/styles/widgets.less              |  71 ++--
 .../app/templates/common/log_file_search.hbs    | 109 ++++++
 .../hawq/activateStandby/step1.hbs              |  28 ++
 .../hawq/activateStandby/step2.hbs              |  53 +++
 .../hawq/activateStandby/step3.hbs              |  18 +
 .../hawq/activateStandby/wizard.hbs             |  44 +++
 .../templates/main/admin/service_auto_start.hbs |  11 +-
 ambari-web/app/templates/main/host/logs.hbs     |  62 ++++
 ambari-web/app/templates/main/service/item.hbs  |   2 +-
 ambari-web/app/utils/ajax/ajax.js               |  11 +-
 .../hawq_activate_standby_config_initializer.js |  53 +++
 .../utils/configs/hawq_ha_config_initializer.js |   2 +-
 ambari-web/app/utils/db.js                      |   1 +
 ambari-web/app/views.js                         |   8 +
 .../configs/widgets/config_widget_view.js       |   3 +
 .../app/views/common/form/datepicker_view.js    |  37 ++
 .../app/views/common/log_file_search_view.js    | 272 +++++++++++++++
 .../modal_popups/log_file_search_popup.js       |  26 ++
 .../app/views/common/quick_view_link_view.js    |  14 +-
 ambari-web/app/views/main/admin.js              |  12 +-
 .../hawq/activateStandby/step1_view.js          |  26 ++
 .../hawq/activateStandby/step2_view.js          |  29 ++
 .../hawq/activateStandby/step3_view.js          |  36 ++
 .../hawq/activateStandby/wizard_view.js         |  74 ++++
 .../app/views/main/admin/service_auto_start.js  |  64 +++-
 ambari-web/app/views/main/host/logs_view.js     | 147 ++++++++
 ambari-web/app/views/main/host/menu.js          |  11 +-
 ambari-web/app/views/main/menu.js               |  12 +-
 ambari-web/app/views/main/service/item.js       |   2 +
 ambari-web/config.coffee                        |   2 +
 ambari-web/karma.conf.js                        |   1 +
 ambari-web/pom.xml                              |   3 +-
 .../activateStandby/step2_controller_test.js    |  87 +++++
 .../nameNode/step3_controller_test.js           |  49 ++-
 .../definitions_configs_controller_test.js      |  26 ++
 .../test/controllers/main/service/item_test.js  | 122 +++++--
 .../test/controllers/wizard/step7_test.js       |  20 +-
 .../views/common/log_file_search_view_test.js   | 103 ++++++
 .../test/views/common/quick_link_view_test.js   |  13 +-
 ambari-web/test/views/main/host/menu_test.js    |  20 +-
 .../vendor/scripts/bootstrap-contextmenu.js     | 205 +++++++++++
 pom.xml                                         |  22 +-
 205 files changed, 8085 insertions(+), 664 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c11a4113/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/c11a4113/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js
----------------------------------------------------------------------
diff --cc ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js
index 9c5b2eb,39a6700..cd9cf40
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js
@@@ -80,37 -69,58 +80,47 @@@ angular.module('ambariAdminConsole'
      });
    };
  
 -  $scope.isDeletable = function() {
 -    return !($scope.repoStatus == 'current' || $scope.repoStatus == 'installed');
 +  /**
 +   * Load supported OS list
 +   */
 +  $scope.afterStackVersionRead = function () {
 +    Stack.getSupportedOSList($scope.upgradeStack.stack_name, $scope.upgradeStack.stack_version)
 +      .then(function (data) {
 +        var operatingSystems = data.operating_systems;
 +        operatingSystems.map(function (os) {
 +          var existingOSHash = {};
 +          angular.forEach($scope.osList, function (os) {
 +            existingOSHash[os.OperatingSystems.os_type] = os;
 +          });
 +          // if os not in the list, mark as un-selected, add this to the osList
 +          if (!existingOSHash[os.OperatingSystems.os_type]) {
 +            os.selected = false;
 +            os.repositories.forEach(function(repo) {
 +              repo.Repositories.base_url = '';
 +            });
 +            $scope.osList.push(os);
 +          }
 +        });
 +      })
 +      .catch(function (data) {
 +        Alert.error($t('versions.alerts.osListError'), data.message);
 +      });
    };
  
 -  $scope.addMissingOSList = function() {
 -    return Stack.getSupportedOSList($scope.stackName, $scope.stackVersion)
 -    .then(function (data) {
 -      var existingOSHash = {};
 -      angular.forEach($scope.osList, function (os) {
 -        existingOSHash[os.OperatingSystems.os_type] = os;
 -      });
 -      var osList = data.operating_systems.map(function (os) {
 -        return existingOSHash[os.OperatingSystems.os_type] || {
 -          OperatingSystems: {
 -            os_type : os.OperatingSystems.os_type
 -          },
 -          repositories: [
 -            {
 -              Repositories: {
 -                base_url: '',
 -                repo_id: 'HDP-' + $routeParams.versionId,
 -                repo_name: 'HDP'
 -              }
 -            },
 -            {
 -              Repositories: {
 -                base_url: '',
 -                repo_id: 'HDP-UTILS-' + $routeParams.versionId,
 -                repo_name: 'HDP-UTILS'
 -              }
 -            }
 -          ],
 -          selected: false
 -        };
 -      });
 -      $scope.osList = osList;
 -    })
 -    .catch(function (data) {
 -      Alert.error($t('versions.alerts.osListError'), data.message);
 -    });
 +  $scope.isDeletable = function() {
 +    return !($scope.repoStatus == 'current' || $scope.repoStatus == 'installed');
    };
  
+   $scope.disableUnusedOS = function() {
+     Cluster.getClusterOS().then(function(usedOS){
+       angular.forEach($scope.osList, function (os) {
+         if (os.OperatingSystems.os_type !== usedOS) {
+           os.disabled = true;
+         }
+       });
+     });
+   };
+ 
    $scope.defaulfOSRepos = {};
  
    $scope.save = function () {

http://git-wip-us.apache.org/repos/asf/ambari/blob/c11a4113/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/c11a4113/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/c11a4113/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------


[03/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f31a06da
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f31a06da
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f31a06da

Branch: refs/heads/trunk
Commit: f31a06dafc5b6a6a3f0162de5c9b0d09c5e489aa
Parents: 4b36397 a7a9796
Author: Nate Cole <nc...@hortonworks.com>
Authored: Wed Jan 20 12:03:51 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Wed Jan 20 12:03:51 2016 -0500

----------------------------------------------------------------------
 .../libraries/functions/copy_tarball.py         |   5 +-
 .../metrics/timeline/AMSPropertyProvider.java   |  57 +++--
 .../cache/TimelineAppMetricCacheKey.java        |  36 +--
 .../cache/TimelineMetricsCacheSizeOfEngine.java |   4 +-
 .../package/scripts/job_history_server.py       |   8 +-
 .../SPARK/1.2.0.2.2/package/scripts/params.py   |  12 +
 .../1.2.0.2.2/package/scripts/setup_spark.py    |  18 +-
 .../1.2.0.2.2/package/scripts/spark_client.py   |   4 +-
 .../1.2.0.2.2/package/scripts/spark_service.py  |  22 +-
 .../package/scripts/spark_thrift_server.py      |   8 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml |   6 +
 .../stacks/HDP/2.3/upgrades/upgrade-2.4.xml     |   6 +
 .../SPARK/configuration/spark-defaults.xml      |  53 +++++
 .../spark-thrift-fairscheduler.xml              |  37 ++++
 .../configuration/spark-thrift-sparkconf.xml    | 164 ++++++++++++++
 .../stacks/HDP/2.4/services/SPARK/metainfo.xml  |  13 ++
 .../stacks/HDP/2.4/upgrades/config-upgrade.xml  |  43 ++++
 ambari-web/app/controllers/main/host/details.js | 219 ++++++++++---------
 ambari-web/app/messages.js                      |   4 +-
 .../configs/add_component_config_initializer.js |   3 +-
 .../views/common/configs/config_history_flow.js |   6 +-
 .../test/controllers/main/host/details_test.js  |  60 ++++-
 .../hive/client/HiveInvalidQueryException.java  |  27 +++
 .../apache/ambari/view/hive/client/Utils.java   |   9 +
 .../hive/utils/ServiceFormattedException.java   |   8 +-
 .../ambari/view/hive/client/UtilsTest.java      |  43 ++++
 docs/pom.xml                                    |  48 +++-
 27 files changed, 753 insertions(+), 170 deletions(-)
----------------------------------------------------------------------



[25/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1809161e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1809161e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1809161e

Branch: refs/heads/trunk
Commit: 1809161eb5914c7905745911cecea4d9da7a3f26
Parents: 2a9f49c 96bdecf
Author: Nate Cole <nc...@hortonworks.com>
Authored: Thu Feb 11 08:46:52 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Thu Feb 11 08:46:52 2016 -0500

----------------------------------------------------------------------
 .../main/python/ambari_agent/RecoveryManager.py |  41 ++---
 .../test/python/ambari_agent/TestActionQueue.py |   2 +-
 .../python/ambari_agent/TestRecoveryManager.py  |  88 +++++-----
 .../src/main/python/ambari_commons/os_check.py  |  73 ++++++---
 .../ambari_commons/resources/os_family.json     | 137 ++++++++--------
 .../libraries/functions/package_conditions.py   |   7 +-
 ambari-funtest/pom.xml                          |   5 -
 .../2.1.1/services/AMBARI_METRICS/metainfo.xml  |   2 +-
 ambari-metrics/ambari-metrics-grafana/README.md |  20 +++
 .../ambari-metrics/datasource.js                | 127 +++++++--------
 .../ambari-metrics/partials/query.editor.html   |   6 +-
 ambari-metrics/ambari-metrics-grafana/pom.xml   |  16 +-
 .../screenshots/20-templating.png               | Bin 0 -> 694376 bytes
 .../timeline/HBaseTimelineMetricStore.java      |  18 ++-
 .../metrics/timeline/PhoenixHBaseAccessor.java  |  21 ++-
 .../timeline/TimelineMetricConfiguration.java   |  10 ++
 ambari-metrics/pom.xml                          |   1 -
 ambari-project/pom.xml                          |   2 +-
 ambari-server/pom.xml                           |   5 -
 .../ambari/server/agent/HeartBeatHandler.java   |  39 ++++-
 .../ambari/server/agent/RecoveryConfig.java     |  18 +--
 .../dispatchers/AlertScriptDispatcher.java      |  18 ++-
 .../services/AlertNoticeDispatchService.java    |   2 +-
 .../state/services/CachedAlertFlushService.java |   2 +-
 .../server/state/stack/JsonOsFamilyRoot.java    |  38 +++++
 .../ambari/server/state/stack/OsFamily.java     |   8 +-
 .../server/upgrade/UpgradeCatalog222.java       | 116 ++++++++++++-
 .../1.6.1.2.2.0/configuration/accumulo-site.xml |   2 +
 .../0.1.0/configuration/ams-site.xml            |  37 +++--
 .../AMBARI_METRICS/0.1.0/metainfo.xml           |   2 +
 .../0.1.0/package/scripts/metrics_collector.py  |   4 +-
 .../0.1.0/package/scripts/metrics_grafana.py    |   5 +-
 .../package/scripts/metrics_grafana_util.py     |   6 +-
 .../0.1.0/package/scripts/metrics_monitor.py    |   2 +-
 .../metrics_grafana_datasource.json.j2          |   2 +-
 .../configuration/application-properties.xml    |   1 +
 .../configuration/falcon-startup.properties.xml |   1 +
 .../common-services/HAWQ/2.0.0/metainfo.xml     |  10 ++
 .../2.0.0/package/scripts/hawq_constants.py     |   1 +
 .../HAWQ/2.0.0/package/scripts/hawqstandby.py   |   6 +-
 .../0.96.0.2.0/configuration/hbase-site.xml     |   1 +
 .../HDFS/2.1.0.2.0/configuration/core-site.xml  |   1 +
 .../HDFS/2.1.0.2.0/configuration/hdfs-site.xml  |   3 +
 .../HIVE/0.12.0.2.0/configuration/hive-site.xml |   3 +
 .../0.12.0.2.0/configuration/webhcat-site.xml   |   1 +
 .../0.8.1.2.2/configuration/kafka-broker.xml    |   2 +
 .../ranger-knox-plugin-properties.xml           |   1 +
 .../4.0.0.2.0/configuration/oozie-site.xml      |   1 +
 .../4.2.0.2.3/configuration/oozie-site.xml      |   1 +
 .../RANGER/0.4.0/configuration/ranger-env.xml   |   1 +
 .../0.5.0.2.3/configuration/kms-site.xml        |   1 +
 .../0.9.1.2.1/configuration/storm-site.xml      |   1 +
 .../configuration-mapred/mapred-site.xml        |   2 +
 .../YARN/2.1.0.2.0/configuration/yarn-site.xml  |   8 +
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |   5 +-
 .../services/HIVE/configuration/hive-site.xml   |   2 +
 .../services/OOZIE/configuration/oozie-site.xml |   1 +
 .../services/YARN/configuration/yarn-site.xml   |   3 +
 .../resources/stacks/HDP/2.2/repos/repoinfo.xml |  12 --
 .../ranger-hbase-plugin-properties.xml          |   1 +
 .../ranger-hdfs-plugin-properties.xml           |   4 +-
 .../services/HIVE/configuration/hive-env.xml    |  20 ---
 .../services/HIVE/configuration/hive-site.xml   |  18 +--
 .../ranger-hive-plugin-properties.xml           |   1 +
 .../ranger-storm-plugin-properties.xml          |   1 +
 .../services/YARN/configuration/yarn-site.xml   |   2 +
 .../stacks/HDP/2.2/services/stack_advisor.py    |  13 +-
 .../resources/stacks/HDP/2.3/repos/repoinfo.xml |  12 --
 .../KAFKA/configuration/kafka-broker.xml        |   1 +
 .../ranger-kafka-plugin-properties.xml          |   1 +
 .../RANGER/configuration/ranger-env.xml         |   2 +
 .../services/STORM/configuration/storm-site.xml |   2 +
 .../resources/stacks/HDP/2.4/repos/repoinfo.xml |  12 --
 .../services/HIVE/configuration/hive-env.xml    |  20 ---
 .../services/HIVE/configuration/hive-site.xml   |   8 -
 .../stacks/HDPWIN/2.2/services/stack_advisor.py |  13 +-
 .../server/agent/TestHeartbeatHandler.java      |  78 ++++++++-
 .../dispatchers/AlertScriptDispatcherTest.java  |  61 +++++++
 .../server/upgrade/UpgradeCatalog222Test.java   |  21 ++-
 ambari-server/src/test/python/TestOSCheck.py    |  37 +++--
 .../2.1/configs/hive-metastore-upgrade.json     |   3 +-
 .../stacks/2.2/common/test_stack_advisor.py     |  17 +-
 .../2.3/common/services-sparkts-hive.json       |  47 ------
 .../stacks/2.3/common/test_stack_advisor.py     |   5 +-
 .../main/admin/service_auto_start.js            | 161 ++++++++++++++++++-
 .../manage_alert_notifications_controller.js    |  74 ++++-----
 ambari-web/app/messages.js                      |   1 +
 .../app/mixins/common/widgets/widget_mixin.js   |  49 +++++-
 ambari-web/app/routes/main.js                   |  13 +-
 ambari-web/app/styles/application.less          |  10 ++
 .../templates/main/admin/service_auto_start.hbs |  46 +++++-
 .../service_auto_start/component_auto_start.hbs |  19 +++
 .../app/templates/main/host/log_metrics.hbs     |  26 +++
 ambari-web/app/templates/main/host/summary.hbs  |  24 ++-
 ambari-web/app/utils/ajax/ajax.js               |  21 +++
 ambari-web/app/utils/ember_reopen.js            |  46 ++++++
 ambari-web/app/views.js                         |   2 +
 ambari-web/app/views/common/chart/pie.js        |  11 +-
 .../views/common/widget/graph_widget_view.js    |   4 +-
 .../app/views/main/admin/service_auto_start.js  |  14 +-
 .../service_auto_start/component_auto_start.js  |  57 +++++++
 .../views/main/dashboard/cluster_metrics/cpu.js |  10 +-
 ambari-web/app/views/main/host/log_metrics.js   | 141 ++++++++++++++++
 ambari-web/app/views/main/host/logs_view.js     |  12 ++
 ...anage_alert_notifications_controller_test.js | 121 ++++++--------
 .../test/mixins/common/widget_mixin_test.js     | 124 ++++++++++++++
 ambari-web/test/utils/ember_reopen_test.js      |  57 +++++++
 .../views/common/log_file_search_view_test.js   |   3 +-
 108 files changed, 1732 insertions(+), 665 deletions(-)
----------------------------------------------------------------------



[34/50] [abbrv] ambari git commit: Merge with trunk

Posted by nc...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-web/test/views/main/dashboard/widgets/hawqsegment_live_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/dashboard/widgets/hawqsegment_live_test.js b/ambari-web/test/views/main/dashboard/widgets/hawqsegment_live_test.js
new file mode 100644
index 0000000..eb8a505
--- /dev/null
+++ b/ambari-web/test/views/main/dashboard/widgets/hawqsegment_live_test.js
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var App = require('app');
+
+require('views/main/dashboard/widget');
+require('views/main/dashboard/widgets/text_widget');
+require('views/main/dashboard/widgets/hawqsegment_live');
+
+describe('App.HawqSegmentUpView', function() {
+
+  var tests = [
+    {
+      data: 100,
+      e: {
+        isRed: false,
+        isOrange: false,
+        isGreen: true
+      }
+    },
+    {
+      data: 0,
+      e: {
+        isRed: true,
+        isOrange: false,
+        isGreen: false
+      }
+    },
+    {
+      data: 50,
+      e: {
+        isRed: false,
+        isOrange: true,
+        isGreen: false
+      }
+    }
+  ];
+
+  tests.forEach(function(test) {
+    describe('', function() {
+      var hawqSegmentUpView = App.HawqSegmentUpView.create({model_type:null, data: test.data, content: test.data.toString()});
+      it('shows red', function() {
+        expect(hawqSegmentUpView.get('isRed')).to.equal(test.e.isRed);
+      });
+      it('shows orange', function() {
+        expect(hawqSegmentUpView.get('isOrange')).to.equal(test.e.isOrange);
+      });
+      it('shows green', function() {
+        expect(hawqSegmentUpView.get('isGreen')).to.equal(test.e.isGreen);
+      });
+    });
+  });
+
+});


[26/50] [abbrv] ambari git commit: AMBARI-14978. Create Upgrade Packs for Patch Upgrades (ncole)

Posted by nc...@apache.org.
AMBARI-14978. Create Upgrade Packs for Patch Upgrades (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/03bc2260
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/03bc2260
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/03bc2260

Branch: refs/heads/trunk
Commit: 03bc22607d533896f44ee3d60e29aa2a08ffe31e
Parents: 1809161
Author: Nate Cole <nc...@hortonworks.com>
Authored: Wed Feb 10 15:55:25 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Thu Feb 11 08:47:10 2016 -0500

----------------------------------------------------------------------
 .../internal/UpgradeResourceProvider.java       |  34 ++-
 .../ambari/server/state/UpgradeContext.java     |  11 +
 .../ambari/server/state/UpgradeHelper.java      |   5 +
 .../state/stack/upgrade/ClusterGrouping.java    |  19 +-
 .../state/stack/upgrade/ColocatedGrouping.java  |  36 ++-
 .../server/state/stack/upgrade/Grouping.java    |  47 ++--
 .../stack/upgrade/StageWrapperBuilder.java      |  41 ++-
 .../ambari/server/state/stack/upgrade/Task.java |   7 +
 .../state/stack/upgrade/UpgradeScope.java       |  60 +++++
 .../HDP/2.2/upgrades/nonrolling-upgrade-2.2.xml |   1 +
 .../stacks/HDP/2.2/upgrades/upgrade-2.2.xml     |   1 +
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml |   1 +
 .../stacks/HDP/2.3/upgrades/upgrade-2.3.xml     |   1 +
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml |   1 +
 .../stacks/HDP/2.4/upgrades/upgrade-2.4.xml     |   1 +
 .../src/main/resources/version_definition.xsd   |   1 +
 .../AmbariManagementControllerTest.java         |   8 +-
 .../ambari/server/state/UpgradeHelperTest.java  | 100 ++++---
 .../HDP/2.1.1/upgrades/upgrade_test_partial.xml | 259 +++++++++++++++++++
 19 files changed, 535 insertions(+), 99 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/03bc2260/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
index db8c079..40565c5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
@@ -106,6 +106,7 @@ import org.apache.ambari.server.state.stack.upgrade.StageWrapper;
 import org.apache.ambari.server.state.stack.upgrade.Task;
 import org.apache.ambari.server.state.stack.upgrade.TaskWrapper;
 import org.apache.ambari.server.state.stack.upgrade.UpdateStackGrouping;
+import org.apache.ambari.server.state.stack.upgrade.UpgradeScope;
 import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostServerActionEvent;
 import org.apache.commons.collections.CollectionUtils;
@@ -304,9 +305,9 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
         String forceDowngrade = requestInfoProps.get(UpgradeResourceDefinition.DOWNGRADE_DIRECTIVE);
         String clusterName = (String) requestMap.get(UPGRADE_CLUSTER_NAME);
 
-          if (null == clusterName) {
-              throw new AmbariException(String.format("%s is required", UPGRADE_CLUSTER_NAME));
-           }
+        if (null == clusterName) {
+          throw new AmbariException(String.format("%s is required", UPGRADE_CLUSTER_NAME));
+        }
 
         Cluster cluster = getManagementController().getClusters().getCluster(clusterName);
         Direction direction = Boolean.parseBoolean(forceDowngrade) ? Direction.DOWNGRADE
@@ -315,15 +316,17 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
         UpgradePack up = validateRequest(direction, requestMap);
 
         try {
-              return createUpgrade(cluster, direction, up, requestMap);
-            } catch (Exception e){
-              LOG.error("Error appears during upgrade task submitting", e);
-
-                // Any error caused in the createUpgrade will initiate transaction rollback
-                  // As we operate inside with cluster data, any cache which belongs to cluster need to be flushed
-                    cluster.invalidateData();
-              throw e;
-            }
+          return createUpgrade(cluster, direction, up, requestMap);
+        } catch (Exception e) {
+          LOG.error("Error appears during upgrade task submitting", e);
+
+          // Any error caused in the createUpgrade will initiate transaction
+          // rollback
+          // As we operate inside with cluster data, any cache which belongs to
+          // cluster need to be flushed
+          cluster.invalidateData();
+          throw e;
+        }
       }
     });
 
@@ -706,6 +709,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
     StackId targetStackId = null;
 
     Set<String> supportedServices = new HashSet<>();
+    UpgradeScope scope = UpgradeScope.COMPLETE;
 
     switch (direction) {
       case UPGRADE:
@@ -733,6 +737,10 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
             for (AvailableService available : services) {
               supportedServices.add(available.getName());
             }
+
+            if (!services.isEmpty()) {
+              scope = UpgradeScope.PARTIAL;
+            }
           }
         }
 
@@ -747,7 +755,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
     UpgradeContext ctx = new UpgradeContext(resolver, sourceStackId, targetStackId, version,
         direction, pack.getType());
     ctx.setSupportedServices(supportedServices);
-
+    ctx.setScope(scope);
 
     if (direction.isDowngrade()) {
       if (requestMap.containsKey(UPGRADE_FROM_VERSION)) {
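
Note: the hunk above defaults the new upgrade scope to COMPLETE and narrows it to PARTIAL only when the target version definition names specific services. Below is a minimal standalone sketch of that decision, not part of the patch; the class and enum names are illustrative stand-ins rather than the Ambari types.

// Illustrative sketch only -- not part of the patch. Mirrors the scope
// selection added to createUpgrade(): COMPLETE unless the request targets
// a subset of services, in which case PARTIAL. The enum is a local
// stand-in for org.apache.ambari.server.state.stack.upgrade.UpgradeScope.
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;

public class ScopeSelectionSketch {
  enum Scope { COMPLETE, PARTIAL, ANY }

  // Default to a full-cluster upgrade; narrow to PARTIAL only when the
  // request explicitly targets one or more services.
  static Scope selectScope(Collection<String> targetedServices) {
    return targetedServices.isEmpty() ? Scope.COMPLETE : Scope.PARTIAL;
  }

  public static void main(String[] args) {
    System.out.println(selectScope(Collections.<String>emptyList())); // COMPLETE
    System.out.println(selectScope(Arrays.asList("ZOOKEEPER")));      // PARTIAL
  }
}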

http://git-wip-us.apache.org/repos/asf/ambari/blob/03bc2260/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
index 05aecac..49b2b45 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
@@ -27,6 +27,7 @@ import java.util.Set;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.stack.MasterHostResolver;
 import org.apache.ambari.server.state.stack.upgrade.Direction;
+import org.apache.ambari.server.state.stack.upgrade.UpgradeScope;
 import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
 
 /**
@@ -88,6 +89,8 @@ public class UpgradeContext {
 
   private Set<String> m_supported = new HashSet<>();
 
+  private UpgradeScope m_scope = UpgradeScope.ANY;
+
   /**
    * Constructor.
    *
@@ -378,4 +381,12 @@ public class UpgradeContext {
 
     return false;
   }
+
+  public void setScope(UpgradeScope scope) {
+    m_scope = scope;
+  }
+
+  public boolean isScoped(UpgradeScope scope) {
+    return m_scope.isScoped(scope);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/03bc2260/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java
index 0d9176d..5143bfa 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java
@@ -270,6 +270,11 @@ public class UpgradeHelper {
 
     for (Grouping group : upgradePack.getGroups(context.getDirection())) {
 
+      // !!! grouping is not scoped to context
+      if (!context.isScoped(group.scope)) {
+        continue;
+      }
+
       UpgradeGroupHolder groupHolder = new UpgradeGroupHolder();
       groupHolder.name = group.name;
       groupHolder.title = group.title;

http://git-wip-us.apache.org/repos/asf/ambari/blob/03bc2260/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
index 5e21da5..49bf372 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
@@ -39,6 +39,7 @@ import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.MaintenanceState;
 import org.apache.ambari.server.state.UpgradeContext;
 import org.apache.ambari.server.state.stack.UpgradePack.ProcessingComponent;
+import org.apache.commons.lang.StringUtils;
 
 import com.google.gson.JsonArray;
 import com.google.gson.JsonObject;
@@ -96,6 +97,10 @@ public class ClusterGrouping extends Grouping {
 
     @XmlElement(name="task")
     public Task task;
+
+    @XmlElement(name="scope")
+    public UpgradeScope scope = UpgradeScope.ANY;
+
   }
 
   public class ClusterBuilder extends StageWrapperBuilder {
@@ -215,8 +220,18 @@ public class ClusterGrouping extends Grouping {
     String component = execution.component;
     ExecuteTask et = (ExecuteTask) execution.task;
 
-    if (null != service && !service.isEmpty() &&
-        null != component && !component.isEmpty()) {
+    if (StringUtils.isNotBlank(service) && StringUtils.isNotBlank(component)) {
+
+      // !!! if the context is not scoped for the execute-stage, bail
+      if (!ctx.isScoped(execution.scope)) {
+        return null;
+      }
+
+      // !!! if the context is targeted and does not include the service, bail
+      if (!ctx.isServiceSupported(service)) {
+        return null;
+      }
+
 
       HostsType hosts = ctx.getResolver().getMasterAndHosts(service, component);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/03bc2260/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ColocatedGrouping.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ColocatedGrouping.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ColocatedGrouping.java
index 11e9267..3705c43 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ColocatedGrouping.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ColocatedGrouping.java
@@ -80,11 +80,9 @@ public class ColocatedGrouping extends Grouping {
     }
 
     @Override
-    public void add(UpgradeContext ctx, HostsType hostsType, String service,
+    public void add(UpgradeContext context, HostsType hostsType, String service,
         boolean clientOnly, ProcessingComponent pc, Map<String, String> params) {
 
-      boolean forUpgrade = ctx.getDirection().isUpgrade();
-
       int count = Double.valueOf(Math.ceil(
           (double) m_batch.percent / 100 * hostsType.hosts.size())).intValue();
 
@@ -103,13 +101,13 @@ public class ColocatedGrouping extends Grouping {
 
         TaskProxy proxy = null;
 
-        List<Task> tasks = resolveTasks(forUpgrade, true, pc);
+        List<Task> tasks = resolveTasks(context, true, pc);
 
         if (null != tasks && tasks.size() > 0) {
           proxy = new TaskProxy();
           proxy.clientOnly = clientOnly;
           proxy.message = getStageText("Preparing",
-              ctx.getComponentDisplay(service, pc.name), Collections.singleton(host));
+              context.getComponentDisplay(service, pc.name), Collections.singleton(host));
           proxy.tasks.addAll(TaskWrapperBuilder.getTaskList(service, pc.name, singleHostsType, tasks, params));
           proxy.service = service;
           proxy.component = pc.name;
@@ -117,22 +115,20 @@ public class ColocatedGrouping extends Grouping {
         }
 
         // !!! FIXME upgrade definition have only one step, and it better be a restart
-        if (null != pc.tasks && 1 == pc.tasks.size()) {
-          Task t = pc.tasks.get(0);
-          if (RestartTask.class.isInstance(t)) {
-            proxy = new TaskProxy();
-            proxy.clientOnly = clientOnly;
-            proxy.tasks.add(new TaskWrapper(service, pc.name, Collections.singleton(host), params, t));
-            proxy.restart = true;
-            proxy.service = service;
-            proxy.component = pc.name;
-            proxy.message = getStageText("Restarting",
-                ctx.getComponentDisplay(service, pc.name), Collections.singleton(host));
-            targetList.add(proxy);
-          }
+        Task t = resolveTask(context, pc);
+        if (null != t && RestartTask.class.isInstance(t)) {
+          proxy = new TaskProxy();
+          proxy.clientOnly = clientOnly;
+          proxy.tasks.add(new TaskWrapper(service, pc.name, Collections.singleton(host), params, t));
+          proxy.restart = true;
+          proxy.service = service;
+          proxy.component = pc.name;
+          proxy.message = getStageText("Restarting",
+              context.getComponentDisplay(service, pc.name), Collections.singleton(host));
+          targetList.add(proxy);
         }
 
-        tasks = resolveTasks(forUpgrade, false, pc);
+        tasks = resolveTasks(context, false, pc);
 
         if (null != tasks && tasks.size() > 0) {
           proxy = new TaskProxy();
@@ -141,7 +137,7 @@ public class ColocatedGrouping extends Grouping {
           proxy.service = service;
           proxy.tasks.addAll(TaskWrapperBuilder.getTaskList(service, pc.name, singleHostsType, tasks, params));
           proxy.message = getStageText("Completing",
-              ctx.getComponentDisplay(service, pc.name), Collections.singleton(host));
+              context.getComponentDisplay(service, pc.name), Collections.singleton(host));
           targetList.add(proxy);
         }
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/03bc2260/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Grouping.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Grouping.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Grouping.java
index 5af893e..67d7fdb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Grouping.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Grouping.java
@@ -17,16 +17,6 @@
  */
 package org.apache.ambari.server.state.stack.upgrade;
 
-import org.apache.ambari.server.stack.HostsType;
-import org.apache.ambari.server.state.UpgradeContext;
-import org.apache.ambari.server.state.stack.UpgradePack;
-import org.apache.ambari.server.state.stack.UpgradePack.ProcessingComponent;
-import org.apache.ambari.server.utils.SetUtils;
-import org.apache.commons.lang.StringUtils;
-
-import javax.xml.bind.annotation.XmlAttribute;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlSeeAlso;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashSet;
@@ -34,6 +24,17 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import javax.xml.bind.annotation.XmlAttribute;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlSeeAlso;
+
+import org.apache.ambari.server.stack.HostsType;
+import org.apache.ambari.server.state.UpgradeContext;
+import org.apache.ambari.server.state.stack.UpgradePack;
+import org.apache.ambari.server.state.stack.UpgradePack.ProcessingComponent;
+import org.apache.ambari.server.utils.SetUtils;
+import org.apache.commons.lang.StringUtils;
+
 /**
  *
  */
@@ -67,6 +68,9 @@ public class Grouping {
   @XmlElement(name="parallel-scheduler")
   public ParallelScheduler parallelScheduler;
 
+  @XmlElement(name="scope")
+  public UpgradeScope scope = UpgradeScope.ANY;
+
   /**
    * Gets the default builder.
    */
@@ -88,44 +92,43 @@ public class Grouping {
     /**
      * Add stages where the restart stages are ordered
      * E.g., preupgrade, restart hosts(0), ..., restart hosts(n-1), postupgrade
+     * @param context the context
      * @param hostsType the order collection of hosts, which may have a master and secondary
      * @param service the service name
      * @param pc the ProcessingComponent derived from the upgrade pack.
      * @param params additional parameters
      */
     @Override
-    public void add(UpgradeContext ctx, HostsType hostsType, String service,
+    public void add(UpgradeContext context, HostsType hostsType, String service,
        boolean clientOnly, ProcessingComponent pc, Map<String, String> params) {
 
-      boolean forUpgrade = ctx.getDirection().isUpgrade();
-
       // Construct the pre tasks during Upgrade/Downgrade direction.
       // Buckets are grouped by the type, e.g., bucket of all Execute tasks, or all Configure tasks.
-      List<TaskBucket> buckets = buckets(resolveTasks(forUpgrade, true, pc));
+      List<TaskBucket> buckets = buckets(resolveTasks(context, true, pc));
       for (TaskBucket bucket : buckets) {
         // The TaskWrappers take into account if a task is meant to run on all, any, or master.
         // A TaskWrapper may contain multiple tasks, but typically only one, and they all run on the same set of hosts.
         List<TaskWrapper> preTasks = TaskWrapperBuilder.getTaskList(service, pc.name, hostsType, bucket.tasks, params);
         List<List<TaskWrapper>> organizedTasks = organizeTaskWrappersBySyncRules(preTasks);
         for (List<TaskWrapper> tasks : organizedTasks) {
-          addTasksToStageInBatches(tasks, "Preparing", ctx, service, pc, params);
+          addTasksToStageInBatches(tasks, "Preparing", context, service, pc, params);
         }
       }
 
       // Add the processing component
-      if (null != pc.tasks && 1 == pc.tasks.size()) {
-        Task t = pc.tasks.get(0);
+      Task t = resolveTask(context, pc);
+      if (null != t) {
         TaskWrapper tw = new TaskWrapper(service, pc.name, hostsType.hosts, params, Collections.singletonList(t));
-        addTasksToStageInBatches(Collections.singletonList(tw), t.getActionVerb(), ctx, service, pc, params);
+        addTasksToStageInBatches(Collections.singletonList(tw), t.getActionVerb(), context, service, pc, params);
       }
 
       // Construct the post tasks during Upgrade/Downgrade direction.
-      buckets = buckets(resolveTasks(forUpgrade, false, pc));
+      buckets = buckets(resolveTasks(context, false, pc));
       for (TaskBucket bucket : buckets) {
         List<TaskWrapper> postTasks = TaskWrapperBuilder.getTaskList(service, pc.name, hostsType, bucket.tasks, params);
         List<List<TaskWrapper>> organizedTasks = organizeTaskWrappersBySyncRules(postTasks);
         for (List<TaskWrapper> tasks : organizedTasks) {
-          addTasksToStageInBatches(tasks, "Completing", ctx, service, pc, params);
+          addTasksToStageInBatches(tasks, "Completing", context, service, pc, params);
         }
       }
 
@@ -157,7 +160,7 @@ public class Grouping {
           subTasks.add(tw);
         }
       }
-      
+
       if (!subTasks.isEmpty()) {
         groupedTasks.add(subTasks);
       }
@@ -208,7 +211,7 @@ public class Grouping {
         }
       }
     }
-    
+
     /**
      * Determine if service checks need to be ran after the stages.
      * @param upgradeContext the upgrade context

http://git-wip-us.apache.org/repos/asf/ambari/blob/03bc2260/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/StageWrapperBuilder.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/StageWrapperBuilder.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/StageWrapperBuilder.java
index e96c7ff..ec7279c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/StageWrapperBuilder.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/StageWrapperBuilder.java
@@ -195,23 +195,56 @@ public abstract class StageWrapperBuilder {
    *   <li>When performing a downgrade, use the downgrade tasks if they are defined</li>
    *   <li>When performing a downgrade, but no downgrade tasks exist, reuse the upgrade tasks</li>
    * </ul>
-   * @param forUpgrade  {@code true} if resolving for an upgrade, {@code false} for downgrade
+   * @param context     the upgrade context
    * @param preTasks    {@code true} if loading pre-upgrade or pre-downgrade
    * @param pc          the processing component holding task definitions
    * @return A collection, potentially empty, of the tasks to run, which may contain either
    * pre or post tasks if they exist, and the order depends on whether it's an upgrade or downgrade.
    */
-  protected List<Task> resolveTasks(boolean forUpgrade, boolean preTasks, ProcessingComponent pc) {
+  protected List<Task> resolveTasks(final UpgradeContext context, boolean preTasks, ProcessingComponent pc) {
     if (null == pc) {
       return Collections.emptyList();
     }
 
+    boolean forUpgrade = context.getDirection().isUpgrade();
+
+    final List<Task> interim;
+
     if (forUpgrade) {
-      return preTasks ? pc.preTasks : pc.postTasks;
+      interim = preTasks ? pc.preTasks : pc.postTasks;
     } else {
-      return preTasks ?
+      interim = preTasks ?
         (null == pc.preDowngradeTasks ? pc.preTasks : pc.preDowngradeTasks) :
         (null == pc.postDowngradeTasks ? pc.postTasks : pc.postDowngradeTasks);
     }
+
+    if (null == interim || interim.isEmpty()) {
+      return Collections.emptyList();
+    }
+
+    List<Task> tasks = new ArrayList<>();
+    for (Task t : interim) {
+      if (context.isScoped(t.scope)) {
+        tasks.add(t);
+      }
+    }
+
+    return tasks;
+  }
+
+  /**
+   * The upgrade packs are written such that there is one and only one upgrade element
+   * for a component, all other directives go in (pre|post)-(upgrade|downgrade) elements.
+   * @param pc the processing component
+   * @return the single task, or {@code null} if there is none
+   */
+  protected Task resolveTask(UpgradeContext context, ProcessingComponent pc) {
+    if (null != pc.tasks && 1 == pc.tasks.size()) {
+      if (context.isScoped(pc.tasks.get(0).scope)) {
+        return pc.tasks.get(0);
+      }
+    }
+
+    return null;
   }
 }
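
Note: the reworked resolveTasks() above first picks the pre/post (or downgrade fallback) task list and then drops any task whose declared scope the current upgrade context does not accept. Below is a standalone sketch of that filtering step, not part of the patch; the types are simplified stand-ins rather than the Ambari classes.

// Illustrative sketch only -- not part of the patch. A task survives only
// if the running upgrade's scope accepts the task's declared scope.
import java.util.ArrayList;
import java.util.List;

public class TaskScopeFilterSketch {
  enum Scope {
    COMPLETE, PARTIAL, ANY;
    // ANY on either side matches everything; otherwise scopes must be equal.
    boolean accepts(Scope other) { return this == ANY || other == ANY || this == other; }
  }

  static class Task {
    final String name;
    final Scope scope;
    Task(String name, Scope scope) { this.name = name; this.scope = scope; }
  }

  // Keep only the tasks whose scope is accepted by the context's scope.
  static List<Task> filterByScope(Scope contextScope, List<Task> interim) {
    List<Task> tasks = new ArrayList<>();
    for (Task t : interim) {
      if (contextScope.accepts(t.scope)) {
        tasks.add(t);
      }
    }
    return tasks;
  }

  public static void main(String[] args) {
    List<Task> interim = new ArrayList<>();
    interim.add(new Task("backup", Scope.ANY));
    interim.add(new Task("set_version_all_hosts", Scope.COMPLETE));
    // With a PARTIAL upgrade only the ANY-scoped task remains.
    for (Task t : filterByScope(Scope.PARTIAL, interim)) {
      System.out.println(t.name);
    }
  }
}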

http://git-wip-us.apache.org/repos/asf/ambari/blob/03bc2260/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Task.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Task.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Task.java
index f365a7f..5c43c2b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Task.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Task.java
@@ -56,6 +56,13 @@ public abstract class Task {
    */
   public abstract String getActionVerb();
 
+  /**
+   * The scope for the task
+   */
+  @XmlElement(name = "scope")
+  public UpgradeScope scope = UpgradeScope.ANY;
+
+
   @Override
   public String toString() {
     return getType().toString();

http://git-wip-us.apache.org/repos/asf/ambari/blob/03bc2260/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/UpgradeScope.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/UpgradeScope.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/UpgradeScope.java
new file mode 100644
index 0000000..f35bd68
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/UpgradeScope.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.state.stack.upgrade;
+
+import javax.xml.bind.annotation.XmlEnum;
+import javax.xml.bind.annotation.XmlEnumValue;
+
+import com.google.gson.annotations.SerializedName;
+
+/**
+ * Indicates the scope of a group or task
+ */
+@XmlEnum
+public enum UpgradeScope {
+
+  /**
+   * Used only when completely upgrading the cluster.
+   */
+  @XmlEnumValue("COMPLETE")
+  @SerializedName("rolling_upgrade")
+  COMPLETE,
+
+  /**
+   * Used only when partially upgrading the cluster.
+   */
+  @XmlEnumValue("PARTIAL")
+  @SerializedName("partial")
+  PARTIAL,
+
+  /**
+   * Used for any scoped upgrade.
+   */
+  @XmlEnumValue("ANY")
+  @SerializedName("any")
+  ANY;
+
+  public boolean isScoped(UpgradeScope scope) {
+    if (ANY == this || ANY == scope) {
+      return true;
+    }
+
+    return this == scope;
+  }
+
+}
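
Note: the new enum treats ANY as a wildcard on either side of isScoped(), while COMPLETE and PARTIAL only match themselves. Below is a standalone sketch, not part of the patch, that prints the resulting truth table using a local copy of the semantics shown above.

// Illustrative sketch only -- not part of the patch. The nested enum is a
// local copy of the isScoped() semantics so the table can be run directly.
public class UpgradeScopeTableSketch {
  enum Scope {
    COMPLETE, PARTIAL, ANY;
    boolean isScoped(Scope scope) {
      if (ANY == this || ANY == scope) {
        return true;
      }
      return this == scope;
    }
  }

  public static void main(String[] args) {
    for (Scope context : Scope.values()) {
      for (Scope group : Scope.values()) {
        // e.g. COMPLETE.isScoped(PARTIAL) -> false, ANY.isScoped(PARTIAL) -> true
        System.out.printf("%s.isScoped(%s) = %s%n", context, group, context.isScoped(group));
      }
    }
  }
}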

http://git-wip-us.apache.org/repos/asf/ambari/blob/03bc2260/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.2.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.2.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.2.xml
index 4fd3316..b82f786 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.2.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.2.xml
@@ -524,6 +524,7 @@
     that are installed on the hosts but not known by Ambari.
     -->
     <group xsi:type="cluster" name="ALL_HOST_OPS" title="Set Version On All Hosts">
+      <scope>COMPLETE</scope>
       <skippable>true</skippable>
       <supports-auto-skip-failure>false</supports-auto-skip-failure>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/03bc2260/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
index 38ee39b..7b635bf 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
@@ -332,6 +332,7 @@
     </group>
 
     <group xsi:type="cluster" name="ALL_HOST_OPS" title="Finalize Hosts">
+      <scope>COMPLETE</scope>
       <execute-stage title="Update remaining HDP stack to {{version}}">
         <task xsi:type="execute">
           <script>scripts/ru_set_all.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/03bc2260/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml
index ac84443..2d54038 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml
@@ -605,6 +605,7 @@
     that are installed on the hosts but not known by Ambari.
     -->
     <group xsi:type="cluster" name="ALL_HOST_OPS" title="Set Version On All Hosts">
+      <scope>COMPLETE</scope>
       <skippable>true</skippable>
       <supports-auto-skip-failure>false</supports-auto-skip-failure>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/03bc2260/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.3.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.3.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.3.xml
index 508483e..52a735b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.3.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.3.xml
@@ -393,6 +393,7 @@
     </group>
 
     <group xsi:type="cluster" name="ALL_HOST_OPS" title="Finalize Hosts">
+      <scope>COMPLETE</scope>
       <execute-stage title="Update remaining HDP stack to {{version}}">
         <task xsi:type="execute">
           <script>scripts/ru_set_all.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/03bc2260/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml
index 3863877..622a737 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml
@@ -600,6 +600,7 @@
     that are installed on the hosts but not known by Ambari.
     -->
     <group xsi:type="cluster" name="ALL_HOST_OPS" title="Set Version On All Hosts">
+      <scope>COMPLETE</scope>
       <skippable>true</skippable>
       <supports-auto-skip-failure>false</supports-auto-skip-failure>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/03bc2260/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.4.xml
index e45e851..a73dc92 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.4.xml
@@ -391,6 +391,7 @@
     </group>
 
     <group xsi:type="cluster" name="ALL_HOST_OPS" title="Finalize Hosts">
+      <scope>COMPLETE</scope>
       <execute-stage title="Update remaining HDP stack to {{version}}">
         <task xsi:type="execute">
           <script>scripts/ru_set_all.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/03bc2260/ambari-server/src/main/resources/version_definition.xsd
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/version_definition.xsd b/ambari-server/src/main/resources/version_definition.xsd
index 77b4203..42035de 100644
--- a/ambari-server/src/main/resources/version_definition.xsd
+++ b/ambari-server/src/main/resources/version_definition.xsd
@@ -54,6 +54,7 @@
       <xs:enumeration value="ubuntu12" />
       <xs:enumeration value="ubuntu14" />
       <xs:enumeration value="suse11" />
+      <xs:enumeration value="amazon2015" />
     </xs:restriction>
   </xs:simpleType>
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/03bc2260/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index a0ac966..3e372f5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -36,9 +36,6 @@ import static org.junit.Assert.fail;
 
 import java.io.StringReader;
 import java.lang.reflect.Type;
-import java.net.ConnectException;
-import java.net.MalformedURLException;
-import java.net.UnknownHostException;
 import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -107,7 +104,6 @@ import org.apache.ambari.server.orm.entities.WidgetLayoutUserWidgetEntity;
 import org.apache.ambari.server.security.TestAuthenticationFactory;
 import org.apache.ambari.server.security.authorization.AuthorizationException;
 import org.apache.ambari.server.security.authorization.Users;
-import org.apache.ambari.server.security.authorization.internal.InternalAuthenticationToken;
 import org.apache.ambari.server.serveraction.ServerAction;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -156,6 +152,7 @@ import org.junit.Test;
 import org.junit.rules.ExpectedException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.springframework.security.core.context.SecurityContextHolder;
 
 import com.google.gson.Gson;
 import com.google.gson.reflect.TypeToken;
@@ -165,7 +162,6 @@ import com.google.inject.Injector;
 import com.google.inject.persist.PersistService;
 
 import junit.framework.Assert;
-import org.springframework.security.core.context.SecurityContextHolder;
 
 public class AmbariManagementControllerTest {
 
@@ -7183,7 +7179,7 @@ public class AmbariManagementControllerTest {
     Assert.assertEquals(1, responsesWithParams.size());
     StackVersionResponse resp = responsesWithParams.iterator().next();
     assertNotNull(resp.getUpgradePacks());
-    assertEquals(8, resp.getUpgradePacks().size());
+    assertEquals(9, resp.getUpgradePacks().size());
     assertTrue(resp.getUpgradePacks().contains("upgrade_test"));
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/03bc2260/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
index b15157e..d513448 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
@@ -60,6 +60,7 @@ import org.apache.ambari.server.state.stack.upgrade.ManualTask;
 import org.apache.ambari.server.state.stack.upgrade.StageWrapper;
 import org.apache.ambari.server.state.stack.upgrade.Task;
 import org.apache.ambari.server.state.stack.upgrade.TaskWrapper;
+import org.apache.ambari.server.state.stack.upgrade.UpgradeScope;
 import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
 import org.apache.ambari.server.utils.EventBusSynchronizer;
 import org.easymock.EasyMock;
@@ -133,26 +134,6 @@ public class UpgradeHelperTest {
     m_masterHostResolver = EasyMock.createMock(MasterHostResolver.class);
     m_managementController = injector.getInstance(AmbariManagementController.class);
 
-//    StackDAO stackDAO = injector.getInstance(StackDAO.class);
-//    StackEntity stackEntity = new StackEntity();
-//    stackEntity.setStackName("HDP");
-//    stackEntity.setStackVersion("2.1");
-//    stackDAO.create(stackEntity);
-//
-//    StackEntity stackEntityTo = new StackEntity();
-//    stackEntityTo.setStackName("HDP");
-//    stackEntityTo.setStackVersion("2.2");
-//    stackDAO.create(stackEntityTo);
-//
-//    Clusters clusters = injector.getInstance(Clusters.class);
-//    clusters.addCluster("c1", new StackId("HDP", "2.1"));
-//
-//    RepositoryVersionDAO repositoryVersionDAO = injector.getInstance(RepositoryVersionDAO.class);
-//    repositoryVersionDAO.create(stackEntity, "2.1.1", "2.1.1", "");
-//    repositoryVersionDAO.create(stackEntityTo, "2.2.0", "2.2.0", "");
-//
-//    replay(m_configHelper);
-
     // Set the authenticated user
     // TODO: remove this or replace the authenticated user to test authorization rules
     SecurityContextHolder.getContext().setAuthentication(TestAuthenticationFactory.createAdministrator("admin"));
@@ -260,7 +241,7 @@ public class UpgradeHelperTest {
   }
 
   @Test
-  public void testSupportedServiceUpgradeOrchestration() throws Exception {
+  public void testPartialUpgradeOrchestration() throws Exception {
     Map<String, UpgradePack> upgrades = ambariMetaInfo.getUpgradePacks("foo", "bar");
     assertTrue(upgrades.isEmpty());
 
@@ -271,8 +252,8 @@ public class UpgradeHelperTest {
     ComponentInfo ci = si.getComponentByName("ZOOKEEPER_SERVER");
     ci.setDisplayName("ZooKeeper1 Server2");
 
-    assertTrue(upgrades.containsKey("upgrade_test"));
-    UpgradePack upgrade = upgrades.get("upgrade_test");
+    assertTrue(upgrades.containsKey("upgrade_test_partial"));
+    UpgradePack upgrade = upgrades.get("upgrade_test_partial");
     assertNotNull(upgrade);
 
     makeCluster();
@@ -280,7 +261,11 @@ public class UpgradeHelperTest {
     UpgradeContext context = new UpgradeContext(m_masterHostResolver, HDP_21,
         HDP_21, UPGRADE_VERSION, Direction.UPGRADE, UpgradeType.ROLLING);
     context.setSupportedServices(Collections.singleton("ZOOKEEPER"));
+    context.setScope(UpgradeScope.PARTIAL);
 
+    List<Grouping> groupings = upgrade.getGroups(Direction.UPGRADE);
+    assertEquals(8, groupings.size());
+    assertEquals(UpgradeScope.COMPLETE, groupings.get(6).scope);
 
     List<UpgradeGroupHolder> groups = m_upgradeHelper.createSequence(upgrade, context);
 
@@ -290,26 +275,77 @@ public class UpgradeHelperTest {
     assertEquals("ZOOKEEPER", groups.get(1).name);
     assertEquals("POST_CLUSTER", groups.get(2).name);
 
-
     UpgradeGroupHolder group = groups.get(1);
     // check that the display name is being used
     assertTrue(group.items.get(1).getText().contains("ZooKeeper1 Server2"));
-    assertEquals(group.items.get(5).getText(), "Service Check Zk");
+    assertEquals("Service Check Zk", group.items.get(6).getText());
 
     UpgradeGroupHolder postGroup = groups.get(2);
     assertEquals("POST_CLUSTER", postGroup.name);
     assertEquals("Finalize Upgrade", postGroup.title);
-    assertEquals(3, postGroup.items.size());
+    assertEquals(2, postGroup.items.size());
     assertEquals("Confirm Finalize", postGroup.items.get(0).getText());
-    assertEquals("Execute HDFS Finalize", postGroup.items.get(1).getText());
-    assertEquals("Save Cluster State", postGroup.items.get(2).getText());
-    assertEquals(StageWrapper.Type.SERVER_SIDE_ACTION, postGroup.items.get(2).getType());
+    assertEquals("Save Cluster State", postGroup.items.get(1).getText());
+    assertEquals(StageWrapper.Type.SERVER_SIDE_ACTION, postGroup.items.get(1).getType());
 
-    assertEquals(4, groups.get(0).items.size());
-    assertEquals(6, groups.get(1).items.size());
-    assertEquals(3, groups.get(2).items.size());
+    assertEquals(3, groups.get(0).items.size());
+    assertEquals(7, groups.get(1).items.size());
+    assertEquals(2, groups.get(2).items.size());
   }
 
+  @Test
+  public void testCompleteUpgradeOrchestration() throws Exception {
+    Map<String, UpgradePack> upgrades = ambariMetaInfo.getUpgradePacks("foo", "bar");
+    assertTrue(upgrades.isEmpty());
+
+    upgrades = ambariMetaInfo.getUpgradePacks("HDP", "2.1.1");
+
+    ServiceInfo si = ambariMetaInfo.getService("HDP", "2.1.1", "ZOOKEEPER");
+    si.setDisplayName("Zk");
+    ComponentInfo ci = si.getComponentByName("ZOOKEEPER_SERVER");
+    ci.setDisplayName("ZooKeeper1 Server2");
+
+    assertTrue(upgrades.containsKey("upgrade_test_partial"));
+    UpgradePack upgrade = upgrades.get("upgrade_test_partial");
+    assertNotNull(upgrade);
+
+    makeCluster();
+
+    UpgradeContext context = new UpgradeContext(m_masterHostResolver, HDP_21,
+        HDP_21, UPGRADE_VERSION, Direction.UPGRADE, UpgradeType.ROLLING);
+    context.setSupportedServices(Collections.singleton("ZOOKEEPER"));
+    context.setScope(UpgradeScope.COMPLETE);
+
+    List<Grouping> groupings = upgrade.getGroups(Direction.UPGRADE);
+    assertEquals(8, groupings.size());
+    assertEquals(UpgradeScope.COMPLETE, groupings.get(6).scope);
+
+    List<UpgradeGroupHolder> groups = m_upgradeHelper.createSequence(upgrade, context);
+
+    assertEquals(4, groups.size());
+
+    assertEquals("PRE_CLUSTER", groups.get(0).name);
+    assertEquals("ZOOKEEPER", groups.get(1).name);
+    assertEquals("ALL_HOSTS", groups.get(2).name);
+    assertEquals("POST_CLUSTER", groups.get(3).name);
+
+    UpgradeGroupHolder group = groups.get(1);
+    // check that the display name is being used
+    assertTrue(group.items.get(1).getText().contains("ZooKeeper1 Server2"));
+    assertEquals("Service Check Zk", group.items.get(5).getText());
+
+    UpgradeGroupHolder postGroup = groups.get(3);
+    assertEquals("POST_CLUSTER", postGroup.name);
+    assertEquals("Finalize Upgrade", postGroup.title);
+    assertEquals(2, postGroup.items.size());
+    assertEquals("Confirm Finalize", postGroup.items.get(0).getText());
+    assertEquals("Save Cluster State", postGroup.items.get(1).getText());
+    assertEquals(StageWrapper.Type.SERVER_SIDE_ACTION, postGroup.items.get(1).getType());
+
+    assertEquals(3, groups.get(0).items.size());
+    assertEquals(6, groups.get(1).items.size());
+    assertEquals(1, groups.get(2).items.size());
+  }
 
   @Test
   public void testUpgradeServerActionOrchestration() throws Exception {

http://git-wip-us.apache.org/repos/asf/ambari/blob/03bc2260/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test_partial.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test_partial.xml b/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test_partial.xml
new file mode 100644
index 0000000..4932e92
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/HDP/2.1.1/upgrades/upgrade_test_partial.xml
@@ -0,0 +1,259 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<upgrade xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <target>2.2.*.*</target>
+  <target-stack>HDP-2.2.0</target-stack>
+  <type>ROLLING</type>
+
+  <prerequisite-checks>
+    <!-- List of additional pre-req checks to run in addition to the required pre-reqs -->
+    <check>org.apache.ambari.server.checks.HiveMultipleMetastoreCheck</check>
+    <check>org.apache.ambari.server.checks.MapReduce2JobHistoryStatePreservingCheck</check>
+    <check>org.apache.ambari.server.checks.SecondaryNamenodeDeletedCheck</check>
+    <check>org.apache.ambari.server.checks.ServicesMapReduceDistributedCacheCheck</check>
+    <check>org.apache.ambari.server.checks.ServicesNamenodeHighAvailabilityCheck</check>
+    <check>org.apache.ambari.server.checks.ServicesTezDistributedCacheCheck</check>
+    <check>org.apache.ambari.server.checks.ServicesYarnWorkPreservingCheck</check>
+    <check>org.apache.ambari.server.checks.YarnRMHighAvailabilityCheck</check>
+    <check>org.apache.ambari.server.checks.YarnTimelineServerStatePreservingCheck</check>
+  </prerequisite-checks>
+  
+  <order>
+    <group xsi:type="cluster" name="PRE_CLUSTER" title="Pre {{direction.text.proper}}">
+      <execute-stage title="Confirm 1">
+        <task xsi:type="manual">
+          <message>Foo</message>
+        </task>
+      </execute-stage>
+      <execute-stage service="HDFS" component="NAMENODE" title="Pre Upgrade HIVE">
+        <task xsi:type="manual">
+          <message>Back stuff up.</message>
+        </task>
+      </execute-stage>
+      <execute-stage service="HDFS" component="NAMENODE" title="Finalize HDFS">
+        <task xsi:type="execute">
+          <command>ls</command>
+        </task>
+      </execute-stage>
+      <execute-stage title="Confirm 2">
+        <task xsi:type="manual">
+          <message>Foo</message>
+        </task>
+      </execute-stage>
+    </group>
+  
+    <group name="ZOOKEEPER" title="Zookeeper">
+      <skippable>true</skippable>
+      <allow-retry>false</allow-retry>
+      <service name="ZOOKEEPER">
+        <component>ZOOKEEPER_SERVER</component>
+        <component>ZOOKEEPER_CLIENT</component>
+      </service>
+    </group>
+    
+    <group name="CORE_MASTER" title="Core Masters">
+      <service name="HDFS">
+        <component>JOURNALNODE</component>
+        <component>NAMENODE</component>
+      </service>
+      <service name="YARN">
+        <component>RESOURCEMANAGER</component>
+      </service>
+    </group>
+    
+    <group name="CORE_SLAVES" title="Core Slaves" xsi:type="colocated">
+      <skippable>true</skippable>      <!-- set skippable for test -->
+      <allow-retry>false</allow-retry> <!-- set no retry for test -->
+      <service name="HDFS">
+        <component>DATANODE</component>
+      </service>
+      <service name="HBASE">
+        <component>REGIONSERVER</component>
+      </service>
+      <service name="YARN">
+        <component>NODEMANAGER</component>
+      </service>
+      
+      <batch>
+        <percent>20</percent>
+        <message>Please run additional tests on {{components}}</message>
+      </batch>
+    </group>
+    
+    <group name="HIVE" title="Hive">
+      <skippable>true</skippable>
+      <service name="HIVE">
+        <component>HIVE_METASTORE</component>
+        <component>HIVE_SERVER</component>
+        <component>WEBHCAT_SERVER</component>
+      </service>
+    </group>
+
+    <group name="OOZIE" title="Oozie">
+      <skippable>true</skippable>
+      <supports-auto-skip-failure>false</supports-auto-skip-failure>
+      <service-check>false</service-check>
+      <service name="OOZIE">
+        <component>OOZIE_SERVER</component>
+        <component>OOZIE_CLIENT</component>
+      </service>
+    </group>
+    
+    <group xsi:type="cluster" name="ALL_HOSTS" title="All Hosts">
+      <scope>COMPLETE</scope>
+      <execute-stage title="All Hosts">
+        <task xsi:type="manual">
+          <message>Something to run on all hosts</message>
+        </task>
+      </execute-stage>
+    </group>
+    
+    <group xsi:type="cluster" name="POST_CLUSTER" title="Finalize {{direction.text.proper}}">
+      <execute-stage title="Confirm Finalize">
+        <task xsi:type="manual">
+          <message>Please confirm you are ready to finalize</message>
+        </task>
+      </execute-stage>
+      <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
+        <task xsi:type="execute">
+          <command>ls</command>
+        </task>
+      </execute-stage>
+      <execute-stage title="Save Cluster State" service="" component="">
+        <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FinalizeUpgradeAction">
+        </task>
+      </execute-stage>
+    </group>
+  </order>
+
+  <processing>
+    <service name="ZOOKEEPER">
+      <component name="ZOOKEEPER_SERVER">
+        <pre-upgrade>
+          <task xsi:type="manual">
+            <summary>SUMMARY OF PREPARE</summary>
+            <message>This is a manual task with a placeholder of {{foo/bar}}</message>
+          </task>
+          <task xsi:type="manual">
+            <scope>PARTIAL</scope>
+            <summary>Only partials</summary>
+            <message>This is only for partials</message>
+          </task>
+        </pre-upgrade>
+        <upgrade>
+          <task xsi:type="restart-task" />
+        </upgrade>
+        <post-upgrade>
+          <task xsi:type="configure" id="2.2.0" />
+        </post-upgrade>
+      </component>
+    </service>
+    
+    <service name="HDFS">
+      <component name="NAMENODE">
+        <pre-upgrade>
+          <task xsi:type="execute" hosts="master">
+            <command>su - {hdfs-user} -c 'dosomething'</command>
+          </task>
+          <task xsi:type="configure" id="hdp_2_1_1_nn_pre_upgrade" />
+          <task xsi:type="manual">
+            <message>{{direction.verb.proper}} your database</message>
+          </task>
+        </pre-upgrade>
+        <upgrade>
+          <task xsi:type="restart-task" />
+        </upgrade>
+        <post-upgrade>
+          <task xsi:type="execute">
+            <command>ls</command>
+          </task>
+        </post-upgrade>
+      </component>
+      <component name="DATANODE">
+        <pre-downgrade />
+        <upgrade>
+          <task xsi:type="restart-task" />
+        </upgrade>
+        <post-downgrade>
+          <task xsi:type="manual">
+            <message>Manual Downgrade</message>
+          </task>
+        </post-downgrade>
+      </component>
+    </service>
+    
+    <service name="YARN">
+      <component name="RESOURCEMANAGER">
+        <pre-upgrade>
+          <task xsi:type="execute">
+            <command>ls</command>
+          </task>
+        </pre-upgrade>
+      </component>
+      <component name="NODEMANAGER">
+        <pre-upgrade>
+          <task xsi:type="execute">
+            <command>ls</command>
+          </task>
+          <task xsi:type="configure" id="hdp_2_1_1_nm_pre_upgrade"/>
+        </pre-upgrade>
+      </component>
+    </service>
+    
+    <service name="HIVE">
+      <component name="HIVE_SERVER">
+        <pre-upgrade>
+          <task xsi:type="manual">
+            <summary>HiveServer Port Availability</summary>
+            <message>The HiveServer port will now change to 10010 if hive is using a binary transfer mode or 10011 if hive is using an http transport mode. You can use "netstat -anp | grep 1001[01]" to determine if the port is available on each of the following HiveServer host(s): {{hosts.all}}. If the port is not available, the process using it must be terminated.</message>
+          </task>
+          <task xsi:type="configure" id="hdp_2_1_1_set_transport_mode"/>
+          <task xsi:type="configure" id="hdp_2_1_1_hive_server_foo"/>
+        </pre-upgrade>
+       </component>
+     </service>
+
+    <service name="OOZIE">
+      <component name="OOZIE_SERVER">
+        <pre-upgrade>
+          <!-- This is important, do not remove it since UpgradeHelperTest.java :
+          testUpgradeWithMultipleTasksWithMultipleHostTypes() asserts
+          that these tasks each run on their own stage. -->
+          <task xsi:type="execute" hosts="all" sequential="true" summary="Shut down all Oozie servers">
+            <script>scripts/oozie_server.py</script>
+            <function>stop</function>
+          </task>
+
+          <task xsi:type="execute" hosts="any" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
+            <script>scripts/oozie_server_upgrade.py</script>
+            <function>upgrade_oozie_database_and_sharelib</function>
+          </task>
+        </pre-upgrade>
+        <upgrade>
+          <task xsi:type="restart-task" />
+        </upgrade>
+      </component>
+
+      <component name="OOZIE_CLIENT">
+        <upgrade>
+          <task xsi:type="restart-task" />
+        </upgrade>
+      </component>
+    </service>
+  </processing>
+</upgrade>
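
The two new tests above hinge on the <scope> element introduced in upgrade_test_partial.xml: a grouping or task marked COMPLETE is only orchestrated when the upgrade context runs with UpgradeScope.COMPLETE, and a PARTIAL-only task (the "Only partials" manual step) is dropped from a complete run. That is why the partial sequence resolves to three groups while the complete sequence also picks up ALL_HOSTS. A minimal sketch of that filtering rule follows; the names (Scope, GroupSketch, isIncluded) are invented for illustration and are not Ambari's actual classes.

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    // Illustrative only: not Ambari's UpgradeScope/UpgradeGroupHolder code.
    public class ScopeFilterSketch {

      enum Scope { ANY, PARTIAL, COMPLETE }

      static class GroupSketch {
        final String name;
        final Scope scope;

        GroupSketch(String name, Scope scope) {
          this.name = name;
          this.scope = scope;
        }
      }

      // Keep a group (or task) when it is unscoped or its scope matches the
      // scope the upgrade context is running with.
      static boolean isIncluded(Scope groupScope, Scope contextScope) {
        return groupScope == Scope.ANY || groupScope == contextScope;
      }

      public static void main(String[] args) {
        List<GroupSketch> pack = Arrays.asList(
            new GroupSketch("PRE_CLUSTER", Scope.ANY),
            new GroupSketch("ZOOKEEPER", Scope.ANY),
            new GroupSketch("ALL_HOSTS", Scope.COMPLETE), // mirrors <scope>COMPLETE</scope> above
            new GroupSketch("POST_CLUSTER", Scope.ANY));

        for (Scope context : new Scope[] { Scope.PARTIAL, Scope.COMPLETE }) {
          List<String> kept = pack.stream()
              .filter(g -> isIncluded(g.scope, context))
              .map(g -> g.name)
              .collect(Collectors.toList());
          System.out.println(context + " -> " + kept);
        }
        // Prints:
        //   PARTIAL -> [PRE_CLUSTER, ZOOKEEPER, POST_CLUSTER]
        //   COMPLETE -> [PRE_CLUSTER, ZOOKEEPER, ALL_HOSTS, POST_CLUSTER]
      }
    }

Filtering this way reproduces the group counts asserted in testPartialUpgradeOrchestration (3) and testCompleteUpgradeOrchestration (4).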


[17/50] [abbrv] ambari git commit: AMBARI-14837. Versions: display all versions as tabs on left side, version details on selecting.(xiwang)

Posted by nc...@apache.org.
AMBARI-14837. Versions: display all versions as tabs on left side, version details on selecting.(xiwang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0f9da428
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0f9da428
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0f9da428

Branch: refs/heads/trunk
Commit: 0f9da42829c5d7118284d09e98190403bfdce41e
Parents: efc3a07
Author: Xi Wang <xi...@apache.org>
Authored: Wed Feb 3 16:15:24 2016 -0800
Committer: Xi Wang <xi...@apache.org>
Committed: Wed Feb 3 16:15:24 2016 -0800

----------------------------------------------------------------------
 .../stackVersions/StackVersionsCreateCtrl.js    |  35 +--
 .../stackVersions/StackVersionsEditCtrl.js      | 241 ++++++++++++-------
 .../stackVersions/StackVersionsListCtrl.js      | 127 +++-------
 .../ui/admin-web/app/scripts/i18n.config.js     |   1 +
 .../ui/admin-web/app/scripts/services/Stack.js  |  30 ++-
 .../resources/ui/admin-web/app/styles/main.css  |  80 +++++-
 .../admin-web/app/views/stackVersions/list.html | 105 +++-----
 .../views/stackVersions/stackVersionPage.html   |  50 +++-
 .../stackVersions/StackversionsListCtrl_test.js | 152 ------------
 9 files changed, 357 insertions(+), 464 deletions(-)
----------------------------------------------------------------------
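
Behind the new left-hand tabs, both StackVersionsListCtrl and StackVersionsEditCtrl collapse the flat repository list into one bucket per stack, keyed by stack_name + '-' + stack_version; the target shape is documented in the fetchRepos comment blocks below. Reduced to its core, the bucketing is an order-preserving group-by. The sketch below (in Java rather than the controllers' JavaScript) uses invented names (RepoSketch, groupByStack) and is not code from this patch.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    // Illustrative only: the real grouping lives in the Angular controllers.
    public class RepoGroupingSketch {

      static class RepoSketch {
        final String stackName;          // e.g. "HDP"
        final String stackVersion;       // e.g. "2.3"
        final String repositoryVersion;  // e.g. "2.3.6.0-2343"

        RepoSketch(String stackName, String stackVersion, String repositoryVersion) {
          this.stackName = stackName;
          this.stackVersion = stackVersion;
          this.repositoryVersion = repositoryVersion;
        }
      }

      // Bucket repository versions under "stackName-stackVersion", preserving
      // insertion order so the accordion headings keep the API's ordering.
      static Map<String, List<String>> groupByStack(List<RepoSketch> repos) {
        Map<String, List<String>> buckets = new LinkedHashMap<>();
        for (RepoSketch repo : repos) {
          String key = repo.stackName + "-" + repo.stackVersion;
          buckets.computeIfAbsent(key, k -> new ArrayList<>()).add(repo.repositoryVersion);
        }
        return buckets;
      }

      public static void main(String[] args) {
        List<RepoSketch> repos = Arrays.asList(
            new RepoSketch("HDP", "2.3", "2.3.6.0-2343"),
            new RepoSketch("HDP", "2.3", "2.3.4.0-56"),
            new RepoSketch("HDP", "2.2", "2.2.6.0"));
        System.out.println(groupByStack(repos));
        // {HDP-2.3=[2.3.6.0-2343, 2.3.4.0-56], HDP-2.2=[2.2.6.0]}
      }
    }

The accordion templates in list.html and stackVersionPage.html then render each bucket name ("HDP-2.3", "HDP-2.2", ...) as a heading and the contained repository versions as rows.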


http://git-wip-us.apache.org/repos/asf/ambari/blob/0f9da428/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
index 37f9c34..df76c15 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
@@ -23,7 +23,6 @@ angular.module('ambariAdminConsole')
   $scope.createController = true;
   $scope.osList = [];
   $scope.skipValidation = false;
-  $scope.repoSubversion = "";
 
   $scope.clusterName = $routeParams.clusterName;
   $scope.subversionPattern = /^\d+\.\d+(-\d+)?$/;
@@ -111,10 +110,8 @@ angular.module('ambariAdminConsole')
         };
       });
       $scope.repoVersionFullName = response.repoVersionFullName;
-      $scope.selectedOS = [];
       angular.forEach(response.osList, function (os) {
         os.selected = true;
-        $scope.selectedOS.push(os.OperatingSystems.os_type);
       });
       $scope.osList = response.osList;
       // load supported os type base on stack version
@@ -130,8 +127,12 @@ angular.module('ambariAdminConsole')
       .then(function (data) {
         var operatingSystems = data.operating_systems;
         operatingSystems.map(function (os) {
-          // os not in the list, mark as un-selected, add this to the osList
-          if ($scope.selectedOS.indexOf(os.OperatingSystems.os_type) < 0) {
+          var existingOSHash = {};
+          angular.forEach($scope.osList, function (os) {
+            existingOSHash[os.OperatingSystems.os_type] = os;
+          });
+          // if os not in the list, mark as un-selected, add this to the osList
+          if (!existingOSHash[os.OperatingSystems.os_type]) {
             os.selected = false;
             os.repositories.forEach(function(repo) {
               repo.Repositories.base_url = '';
@@ -183,9 +184,10 @@ angular.module('ambariAdminConsole')
       if (invalidUrls.length === 0) {
         Stack.addRepo($scope.upgradeStack, $scope.actualVersion, $scope.osList)
           .success(function () {
-            var versionName = $scope.actualVersion + '';
-            var stackName = $scope.upgradeStack.stack_name;
-            Alert.success($t('versions.alerts.versionCreated'), {stackName: stackName, versionName: versionName});
+            Alert.success($t('versions.alerts.versionCreated', {
+              stackName: $scope.upgradeStack.stack_name,
+              versionName: $scope.actualVersion
+            }));
             $location.path('/stackVersions');
           })
           .error(function (data) {
@@ -196,19 +198,12 @@ angular.module('ambariAdminConsole')
       }
     });
   };
-  /**
-   * TODO create parent controller for StackVersionsEditCtrl and StackVersionsCreateCtrl and
-   * move this method to it
-   */
+
   $scope.cancel = function () {
     $scope.editVersionDisabled = true;
     $location.path('/stackVersions');
   };
 
-  /**
-   * TODO create parent controller for StackVersionsEditCtrl and StackVersionsCreateCtrl and
-   * move this method to it
-   */
   $scope.clearErrors = function() {
     if ($scope.osList) {
       $scope.osList.forEach(function(os) {
@@ -221,18 +216,10 @@ angular.module('ambariAdminConsole')
     }
   };
 
-  /**
-   * TODO create parent controller for StackVersionsEditCtrl and StackVersionsCreateCtrl and
-   * move this method to it
-   */
   $scope.clearError = function() {
     this.repository.hasError = false;
   };
 
-  /**
-   * TODO create parent controller for StackVersionsEditCtrl and StackVersionsCreateCtrl and
-   * move this method to it
-   */
   $scope.hasValidationErrors = function() {
     var hasErrors = false;
     if ($scope.osList) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/0f9da428/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js
index 0763726..9c5b2eb 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js
@@ -24,17 +24,27 @@ angular.module('ambariAdminConsole')
   $scope.osList = [];
   $scope.skipValidation = false;
   $scope.selectedOS = 0;
+  $scope.upgradeStack = {
+    stack_name: '',
+    stack_version: '',
+    display_name: ''
+  };
 
   $scope.loadStackVersionInfo = function () {
     return Stack.getRepo($routeParams.versionId, $routeParams.stackName).then(function (response) {
       $scope.id = response.id;
-      $scope.stack = response.stack;
-      $scope.stackName = response.stackName;
-      $scope.versionName = response.versionName;
-      $scope.displayName = response.displayName;
-      $scope.stackVersion = response.stackVersion;
+      $scope.isPatch = response.type == 'PATCH';
+      $scope.stackNameVersion = response.stackNameVersion || 'n/a';
+      $scope.displayName = response.displayName || 'n/a';
+      $scope.version = response.version || 'n/a';
+      $scope.actualVersion = response.actualVersion || 'n/a';
       $scope.updateObj = response.updateObj;
-      $scope.subversion = response.versionName.substring(4); // cut off stack version
+      $scope.upgradeStack = {
+        stack_name: response.stackName,
+        stack_version: response.stackVersion,
+        display_name: response.displayName
+      };
+      $scope.services = response.services || [];
       //save default values of repos to check if they were changed
       $scope.defaulfOSRepos = {};
       response.updateObj.operating_systems.forEach(function(os) {
@@ -47,8 +57,10 @@ angular.module('ambariAdminConsole')
       angular.forEach(response.osList, function (os) {
         os.selected = true;
       });
-      $scope.selectedOS = response.osList.length;
       $scope.osList = response.osList;
+      // load supported os type base on stack version
+      $scope.afterStackVersionRead();
+
       // if user reach here from UI click, repo status should be cached
       // otherwise re-fetch repo status from cluster end point.
       $scope.repoStatus = Cluster.repoStatusCache[$scope.id];
@@ -63,50 +75,40 @@ angular.module('ambariAdminConsole')
       } else {
         $scope.deleteEnabled = $scope.isDeletable();
       }
-      $scope.addMissingOSList();
+      // fetch all repos to display the left menu
+      $scope.fetchRepos();
     });
   };
 
-  $scope.isDeletable = function() {
-    return !($scope.repoStatus == 'current' || $scope.repoStatus == 'installed');
+  /**
+   * Load supported OS list
+   */
+  $scope.afterStackVersionRead = function () {
+    Stack.getSupportedOSList($scope.upgradeStack.stack_name, $scope.upgradeStack.stack_version)
+      .then(function (data) {
+        var operatingSystems = data.operating_systems;
+        operatingSystems.map(function (os) {
+          var existingOSHash = {};
+          angular.forEach($scope.osList, function (os) {
+            existingOSHash[os.OperatingSystems.os_type] = os;
+          });
+          // if os not in the list, mark as un-selected, add this to the osList
+          if (!existingOSHash[os.OperatingSystems.os_type]) {
+            os.selected = false;
+            os.repositories.forEach(function(repo) {
+              repo.Repositories.base_url = '';
+            });
+            $scope.osList.push(os);
+          }
+        });
+      })
+      .catch(function (data) {
+        Alert.error($t('versions.alerts.osListError'), data.message);
+      });
   };
 
-  $scope.addMissingOSList = function() {
-    Stack.getSupportedOSList($scope.stackName, $scope.stackVersion)
-    .then(function (data) {
-      var existingOSHash = {};
-      angular.forEach($scope.osList, function (os) {
-        existingOSHash[os.OperatingSystems.os_type] = os;
-      });
-      var osList = data.operating_systems.map(function (os) {
-        return existingOSHash[os.OperatingSystems.os_type] || {
-          OperatingSystems: {
-            os_type : os.OperatingSystems.os_type
-          },
-          repositories: [
-            {
-              Repositories: {
-                base_url: '',
-                repo_id: 'HDP-' + $routeParams.versionId,
-                repo_name: 'HDP'
-              }
-            },
-            {
-              Repositories: {
-                base_url: '',
-                repo_id: 'HDP-UTILS-' + $routeParams.versionId,
-                repo_name: 'HDP-UTILS'
-              }
-            }
-          ],
-          selected: false
-        };
-      });
-      $scope.osList = osList;
-    })
-    .catch(function (data) {
-      Alert.error($t('versions.alerts.osListError'), data.message);
-    });
+  $scope.isDeletable = function() {
+    return !($scope.repoStatus == 'current' || $scope.repoStatus == 'installed');
   };
 
   $scope.defaulfOSRepos = {};
@@ -143,17 +145,13 @@ angular.module('ambariAdminConsole')
   };
 
   $scope.updateRepoVersions = function () {
-    var upgradeStack = {
-      stack_name: $scope.stackName,
-      stack_version: $scope.stackVersion
-    };
-    return Stack.validateBaseUrls($scope.skipValidation, $scope.osList, upgradeStack).then(function (invalidUrls) {
+    return Stack.validateBaseUrls($scope.skipValidation, $scope.osList, $scope.upgradeStack).then(function (invalidUrls) {
       if (invalidUrls.length === 0) {
-        Stack.updateRepo($scope.stackName, $scope.stackVersion, $scope.id, $scope.updateObj).then(function () {
+        Stack.updateRepo($scope.upgradeStack.stack_name, $scope.upgradeStack.stack_version, $scope.id, $scope.updateObj).then(function () {
           Alert.success($t('versions.alerts.versionEdited', {
-            stackName: $scope.stackName,
-            versionName: $scope.versionName,
-            displayName: $scope.displayName
+            stackName: $scope.upgradeStack.stack_name,
+            versionName: $scope.actualVersion,
+            displayName: $scope.repoVersionFullName
           }));
           $location.path('/stackVersions');
         }).catch(function (data) {
@@ -187,22 +185,56 @@ angular.module('ambariAdminConsole')
       $t('versions.deregister'),
       {
         "url": 'views/modals/BodyForDeregisterVersion.html',
-        "scope": {"displayName": $scope.displayName }
+        "scope": {"displayName": $scope.repoVersionFullName }
       }
     ).then(function() {
-      Stack.deleteRepo($scope.stackName, $scope.stackVersion, $scope.id).then( function () {
-        $location.path('/stackVersions');
-      }).catch(function (data) {
-        Alert.error($t('versions.alerts.versionDeleteError'), data.message);
+        Stack.deleteRepo($scope.upgradeStack.stack_name, $scope.upgradeStack.stack_version, $scope.id).then( function () {
+          $location.path('/stackVersions');
+        }).catch(function (data) {
+            Alert.error($t('versions.alerts.versionDeleteError'), data.message);
+          });
       });
-    });
   };
   $scope.loadStackVersionInfo();
-    
+
+  /**
+   * On click handler for removing OS
+   */
+  $scope.removeOS = function() {
+    this.os.selected = false;
+    if (this.os.repositories) {
+      this.os.repositories.forEach(function(repo) {
+        repo.hasError = false;
+      });
+    }
+  };
   /**
-   * TODO create parent controller for StackVersionsEditCtrl and StackVersionsCreateCtrl and
-   * move this method to it
+   * On click handler for adding new OS
    */
+  $scope.addOS = function() {
+    this.os.selected = true;
+    if (this.os.repositories) {
+      this.os.repositories.forEach(function(repo) {
+        repo.hasError = false;
+      });
+    }
+  };
+
+  $scope.isSaveButtonDisabled = function() {
+    var enabled = false;
+    $scope.osList.forEach(function(os) {
+      if (os.selected) {
+        enabled = true
+      }
+    });
+    return !enabled;
+  }
+
+  $scope.cancel = function () {
+    $scope.editVersionDisabled = true;
+    $location.path('/stackVersions');
+  };
+
   $scope.clearErrors = function() {
     if ($scope.osList) {
       $scope.osList.forEach(function(os) {
@@ -214,29 +246,11 @@ angular.module('ambariAdminConsole')
       });
     }
   };
-  /**
-   * TODO create parent controller for StackVersionsEditCtrl and StackVersionsCreateCtrl and
-   * move this method to it
-   */
+
   $scope.clearError = function () {
     this.repository.hasError = false;
   };
 
-  /**
-   * TODO create parent controller for StackVersionsEditCtrl and StackVersionsCreateCtrl and
-   * move this method to it
-   */
-  $scope.toggleOSSelect = function () {
-    this.os.repositories.forEach(function (repo) {
-      repo.hasError = false;
-    });
-    this.os.selected ? $scope.selectedOS++ : $scope.selectedOS--;
-  };
-
-  /**
-   * TODO create parent controller for StackVersionsEditCtrl and StackVersionsCreateCtrl and
-   * move this method to it
-   */
   $scope.hasValidationErrors = function () {
     var hasErrors = false;
     if ($scope.osList) {
@@ -253,13 +267,62 @@ angular.module('ambariAdminConsole')
     return hasErrors;
   };
 
+
+  // add all repos list
+  $scope.filter = {
+    version: '',
+    cluster: {
+      options: [],
+      current: null
+    }
+  };
+
+  $scope.pagination = {
+    totalRepos: 100,
+    maxVisiblePages: 1,
+    itemsPerPage: 100,
+    currentPage: 1
+  };
+  $scope.allRepos = [];
+  $scope.stackVersions = [];
+
+
+
   /**
-   * TODO create parent controller for StackVersionsEditCtrl and StackVersionsCreateCtrl and
-   * move this method to it
+   *  Formatted object to display all repos:
+   *
+   *  [{ 'name': 'HDP-2.3',
+   *     'repos': ['2.3.6.0-2343', '2.3.4.1', '2.3.4.0-56']
+   *   },
+   *   { 'name': 'HDP-2.2',
+   *     'repos': ['2.2.6.0', '2.2.4.5', '2.2.4.0']
+   *   }
+   *  ]
+   *
    */
-  $scope.cancel = function () {
-    $scope.editVersionDisabled = true;
-    $location.path('/stackVersions');
+  $scope.fetchRepos = function () {
+    return Stack.allRepos($scope.filter, $scope.pagination).then(function (repos) {
+      $scope.allRepos = repos.items.sort(function(a, b){return a.repository_version < b.repository_version});
+      var existingStackHash = {};
+      var stackVersions = [];
+      angular.forEach($scope.allRepos, function (repo) {
+        var stackVersionName = repo.stack_name + '-' + repo.stack_version;
+        var currentStackVersion = $scope.upgradeStack.stack_name + '-' + $scope.upgradeStack.stack_version;
+        repo.isActive = $scope.actualVersion == repo.repository_version;
+        if (!existingStackHash[stackVersionName]) {
+          existingStackHash[stackVersionName] = true;
+          stackVersions.push({
+            'name': stackVersionName,
+            'isOpened': stackVersionName == currentStackVersion,
+            'repos': [repo]
+          });
+        } else {
+          if (stackVersions[stackVersions.length -1].repos) {
+            stackVersions[stackVersions.length -1].repos.push(repo);
+          }
+        }
+      });
+      $scope.stackVersions = stackVersions;
+    });
   };
-
 }]);

http://git-wip-us.apache.org/repos/asf/ambari/blob/0f9da428/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsListCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsListCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsListCtrl.js
index 010ec1b..3a8233a 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsListCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsListCtrl.js
@@ -24,6 +24,7 @@ angular.module('ambariAdminConsole')
     return $t('common.' + key).toLowerCase();
   }
   $scope.clusterName = $routeParams.clusterName;
+
   $scope.filter = {
     version: '',
     cluster: {
@@ -31,106 +32,52 @@ angular.module('ambariAdminConsole')
       current: null
     }
   };
-  $scope.isNotEmptyFilter = true;
 
   $scope.pagination = {
-    totalRepos: 10,
-    maxVisiblePages: 20,
-    itemsPerPage: 10,
+    totalRepos: 100,
+    maxVisiblePages: 1,
+    itemsPerPage: 100,
     currentPage: 1
   };
+  $scope.allRepos = [];
+  $scope.stackVersions = [];
 
-  $scope.tableInfo = {
-    total: 0,
-    showed: 0,
-    filtered: 0
-  };
-
-  $scope.stacks = [];
-  $scope.dropDownClusters = [];
-  $scope.selectedCluster = $scope.dropDownClusters[0];
-
-  $scope.resetPagination = function () {
-    $scope.pagination.currentPage = 1;
-    $scope.loadAllData();
-  };
-
-  $scope.pageChanged = function () {
-    $scope.loadAllData();
-  };
-
-  $scope.goToCluster = function() {
-    window.location.replace('/#/main/admin/stack/versions');
-  };
-
-  $scope.clearFilters = function () {
-    $scope.filter.version = '';
-    $scope.filter.cluster.current = $scope.filter.cluster.options[0];
-    $scope.resetPagination();
-  };
-
-  $scope.fetchRepoClusterStatus = function () {
-    var clusterName = ($scope.clusters && $scope.clusters.length > 0) ? $scope.clusters[0].Clusters.cluster_name : null; // only support one cluster at the moment
-    if (clusterName) {
-      angular.forEach($scope.repos, function (repo) {
-        Cluster.getRepoVersionStatus(clusterName, repo.id).then(function (response) {
-          repo.status = response.status;
-          repo.totalHosts = response.totalHosts;
-          repo.currentHosts = response.currentHosts;
-          repo.installedHosts = response.installedHosts;
-          repo.stackVersionId = response.stackVersionId;
-          repo.cluster = (repo.status == 'current' || repo.status == 'installed')? clusterName : '';
-        });
-      });
-    }
-  };
-
+  /**
+   *  Formatted object to display all repos:
+   *
+   *  [{ 'name': 'HDP-2.3',
+   *     'repos': ['2.3.6.0-2343', '2.3.4.1', '2.3.4.0-56']
+   *   },
+   *   { 'name': 'HDP-2.2',
+   *     'repos': ['2.2.6.0', '2.2.4.5', '2.2.4.0']
+   *   }
+   *  ]
+   *
+   */
   $scope.fetchRepos = function () {
     return Stack.allRepos($scope.filter, $scope.pagination).then(function (repos) {
-      $scope.pagination.totalRepos = repos.itemTotal;
-      $scope.repos = repos.items;
-      $scope.tableInfo.total = repos.itemTotal;
-      $scope.tableInfo.showed = repos.showed;
-    });
-  };
-
-    $scope.fillClusters = function (clusters) {
-      $scope.dropDownClusters = [].concat(clusters);
-      var options = [{label: $t('common.all'), value: ''}];
-      angular.forEach(clusters, function (cluster) {
-        options.push({
-          label: cluster.Clusters.cluster_name,
-          value: cluster.Clusters.cluster_name
-        });
+      $scope.allRepos = repos.items.sort(function(a, b){return a.repository_version < b.repository_version});
+      var existingStackHash = {};
+      var stackVersions = [];
+      angular.forEach($scope.allRepos, function (repo) {
+        var stackVersionName = repo.stack_name + '-' + repo.stack_version;
+        if (!existingStackHash[stackVersionName]) {
+          existingStackHash[stackVersionName] = true;
+          stackVersions.push({
+            'name': stackVersionName,
+            'isOpened': true,
+            'repos': [repo]
+          });
+        } else {
+          if (stackVersions[stackVersions.length -1].repos) {
+            stackVersions[stackVersions.length -1].repos.push(repo);
+          }
+        }
       });
-      $scope.filter.cluster.options = options;
-      if (!$scope.filter.cluster.current) {
-        $scope.filter.cluster.current = options[0];
-      }
-    };
-
-  $scope.fetchClusters = function () {
-    return Cluster.getAllClusters().then(function (clusters) {
-      if (clusters && clusters.length > 0) {
-        $scope.clusters = clusters;
-        $scope.fillClusters(clusters);
-      }
-    });
-  };
-
-  $scope.loadAllData = function () {
-    $scope.fetchClusters()
-    .then(function () {
-      return $scope.fetchRepos();
-    })
-    .then(function () {
-      $scope.fetchRepoClusterStatus();
+      $scope.stackVersions = stackVersions;
     });
   };
 
-  $scope.loadAllData();
+  $scope.fetchRepos();
 
-  $scope.$watch('filter', function (filter) {
-    $scope.isNotEmptyFilter = Boolean(filter.version || (filter.cluster.current && filter.cluster.current.value));
-  }, true);
 }]);

http://git-wip-us.apache.org/repos/asf/ambari/blob/0f9da428/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
index 9f6add0..afc2f44 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
@@ -306,6 +306,7 @@ angular.module('ambariAdminConsole')
       'os': 'OS',
       'baseURL': 'Base URL',
       'skipValidation': 'Skip Repository Base URL validation (Advanced)',
+      'noVersions': 'Select a version to display details.',
       'contents': {
         'title': 'Contents',
         'empty': 'No contents to display'

http://git-wip-us.apache.org/repos/asf/ambari/blob/0f9da428/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
index a12b430..20781b6 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
@@ -157,14 +157,27 @@ angular.module('ambariAdminConsole')
         var response = {
           id : data.repository_versions[0].RepositoryVersions.id,
           stackVersion : data.Versions.stack_version,
-          stack: data.Versions.stack_name + '-' + data.Versions.stack_version,
           stackName: data.Versions.stack_name,
-          versionName: data.repository_versions[0].RepositoryVersions.repository_version,
-          displayName : data.repository_versions[0].RepositoryVersions.display_name,
+          type: data.repository_versions[0].RepositoryVersions.release? data.repository_versions[0].RepositoryVersions.release.type: null,
+          stackNameVersion: data.Versions.stack_name + '-' + data.Versions.stack_version, /// HDP-2.3
+          actualVersion: data.repository_versions[0].RepositoryVersions.repository_version, /// 2.3.4.0-3846
+          version: data.repository_versions[0].RepositoryVersions.release ? data.repository_versions[0].RepositoryVersions.release.version: null, /// 2.3.4.0
+          releaseNotes: data.repository_versions[0].RepositoryVersions.release ? data.repository_versions[0].RepositoryVersions.release.release_notes: null,
+          displayName: data.repository_versions[0].RepositoryVersions.release ? data.Versions.stack_name + '-' + data.repository_versions[0].RepositoryVersions.release.version :
+            data.Versions.stack_name + '-' + data.repository_versions[0].RepositoryVersions.repository_version.split('-')[0], //HDP-2.3.4.0
           repoVersionFullName : data.Versions.stack_name + '-' + data.repository_versions[0].RepositoryVersions.repository_version,
           osList: data.repository_versions[0].operating_systems,
           updateObj: data.repository_versions[0]
         };
+        var services = [];
+        angular.forEach(data.repository_versions[0].RepositoryVersions.services, function (service) {
+          services.push({
+            name: service.name,
+            version: service.versions[0].version,
+            components: service.versions[0].components
+          });
+        });
+        response.services = services;
         deferred.resolve(response);
       })
       .error(function (data) {
@@ -286,12 +299,13 @@ angular.module('ambariAdminConsole')
             id : data.repository_versions[0].RepositoryVersions.id,
             stackVersion : data.Versions.stack_version,
             stackName: data.Versions.stack_name,
-            type: data.repository_versions[0].RepositoryVersions.release.type,
+            type: data.repository_versions[0].RepositoryVersions.type,
             stackNameVersion: data.Versions.stack_name + '-' + data.Versions.stack_version, /// HDP-2.3
             actualVersion: data.repository_versions[0].RepositoryVersions.repository_version, /// 2.3.4.0-3846
-            version: data.repository_versions[0].RepositoryVersions.release.version, /// 2.3.4.0
-            releaseNotes: data.repository_versions[0].RepositoryVersions.release.release_notes,
-            displayName: data.Versions.stack_name + '-' + data.repository_versions[0].RepositoryVersions.release.version, //HDP-2.3.4.0
+            version: data.repository_versions[0].RepositoryVersions.release ? data.repository_versions[0].RepositoryVersions.release.version: null, /// 2.3.4.0
+            releaseNotes: data.repository_versions[0].RepositoryVersions.release ? data.repository_versions[0].RepositoryVersions.release.release_notes: null,
+            displayName: data.repository_versions[0].RepositoryVersions.release ? data.Versions.stack_name + '-' + data.repository_versions[0].RepositoryVersions.release.version :
+              data.Versions.stack_name + '-' + data.repository_versions[0].RepositoryVersions.repository_version.split('-')[0], //HDP-2.3.4.0
             repoVersionFullName : data.Versions.stack_name + '-' + data.repository_versions[0].RepositoryVersions.repository_version,
             osList: data.repository_versions[0].operating_systems,
             updateObj: data.repository_versions[0]
@@ -299,7 +313,7 @@ angular.module('ambariAdminConsole')
           var services = [];
           angular.forEach(data.repository_versions[0].RepositoryVersions.services, function (service) {
             services.push({
-              name: service.name,
+              name: service.display_name,
               version: service.versions[0].version,
               components: service.versions[0].components
             });

http://git-wip-us.apache.org/repos/asf/ambari/blob/0f9da428/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
index 83d4ffa..93f2271 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
@@ -1378,11 +1378,57 @@ thead.view-permission-header > tr > th {
 }
 
 .enable-ldap input[type="checkbox"] {
-  margin-top: 10px;
+    margin-top: 10px;
 }
 
 .test-ldap-icon.ng-hide-add-active, .test-ldap-icon.ng-hide-remove {
-  display: inline-block!important;
+    display: inline-block!important;
+}
+
+.left-menu-all-repos {
+  padding-left: 5px;
+  padding-right: 5px;
+}
+
+.left-menu-all-repos .glyphicon.glyphicon-chevron-right{
+  -webkit-transition: all 0.3s;
+  -o-transition: all 0.3s;
+  transition: all 0.3s;
+}
+.left-menu-all-repos .glyphicon.glyphicon-chevron-right.opened{
+  -webkit-transform: rotateZ(90deg);
+  -ms-transform: rotateZ(90deg);
+  -o-transform: rotateZ(90deg);
+  transform: rotateZ(90deg);
+}
+
+.left-menu-all-repos .stack-version-title {
+  font-size: 14px;
+  cursor: pointer;
+  text-decoration: none;
+  padding-left: 10px;
+}
+
+.left-menu-all-repos .repos-table {
+  margin-bottom: 0px;
+}
+
+.left-menu-all-repos .panel-body {
+  padding: 15px 0px;
+}
+
+.left-menu-all-repos .repos-table .repos-td{
+  border-top: none;
+  padding: 5px 10px;
+}
+.left-menu-all-repos .repos-table .repos-td > a {
+  text-decoration: none;
+}
+.left-menu-all-repos .repos-table .repos-td.active{
+  background-color: #666;
+}
+.left-menu-all-repos .repos-table .repos-td.active > a {
+  color: white;
 }
 
 .register-version-options .read-info-button {
@@ -1398,11 +1444,24 @@ thead.view-permission-header > tr > th {
   padding-bottom: 20px;
 }
 
-.register-version-form .patch-icon {
+.register-version-form .details-panel .patch-icon {
   color: #ff4500;
 }
+.register-version-form .deregister-button {
+    margin-top: -23px;
+}
+.register-version-form .version-info {
+    padding-top: 7px;
+    margin-top: 0;
+    margin-bottom: 0;
+}
+
+.register-version-form .contents-panel .version-contents-body {
+    max-height: 150px;
+    overflow: scroll;
+}
 
-.register-version-form .remove-icon {
+.register-version-form .repos-panel .remove-icon {
   color: red;
   margin: 20px 0px;
   padding: 0px;
@@ -1410,14 +1469,13 @@ thead.view-permission-header > tr > th {
   cursor: pointer;
 }
 
-.register-version-form .version-info {
-  padding-top: 7px;
-  margin-top: 0;
-  margin-bottom: 0;
+.register-version-form .repos-panel .os-type-label {
+  margin-top: 27px;
 }
 
-.version-contents-body {
-  max-height: 150px;
-  overflow: scroll;
+#stack-versions .no-version-alert {
+  text-align: center;
 }
 
+
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/0f9da428/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/list.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/list.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/list.html
index 5f5421b..3de92c1 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/list.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/list.html
@@ -29,85 +29,34 @@
     </div>
   </div>
   <hr/>
-  <table class="table table-striped table-hover">
-    <thead>
-    <tr>
-      <th class="col-medium">
-        <label>{{'common.name' | translate}}</label>
-        <input type="text" class="form-control" ng-change="resetPagination()" ng-model="filter.version" placeholder="{{'common.any' | translate}}">
-      </th>
-      <th class="col-small">
-        <label>{{'common.cluster' | translate}}</label>
-        <select class="form-control"
-                ng-change="resetPagination()"
-                ng-model="filter.cluster.current"
-                ng-options="item.label for item in filter.cluster.options track by item.value"
-          ></select>
-      </th>
-      <th></th>
-    </tr>
-    </thead>
-    <tbody>
-    <tr ng-repeat="repo in repos">
-      <td class="col-medium">
-        <a href="#/stackVersions/{{repo.stack_name}}/{{repo.repository_version}}/edit">{{repo.display_name}}</a>
-      </td>
-      <td class="col-small">
-        <a href="/#/main/admin/stack/versions" ng-show="repo.cluster">
-          {{repo.cluster}}
-        </a>
-        <span ng-show="!repo.cluster">
-          {{'common.none' | translate}}
-        </span>
-      </td>
-      <td class="verison-label-row">
-        <div ng-show="repo.status == 'current'">
-          <span class="label {{'status-' + repo.status}}">{{'versions.current' | translate}}:&nbsp;{{repo.currentHosts}}/{{repo.totalHosts}}</span>
-        </div>
-        <div ng-show="repo.status == 'installed'">
-          <span class="label {{'status-' + repo.status}}">{{'versions.installed' | translate}}:&nbsp;{{repo.installedHosts}}/{{repo.totalHosts}}</span>
-        </div>
-        <div ng-show="!repo.cluster">
-          <div class="btn-group display-inline-block" dropdown is-open="viewsdropdown.isopen" ng-mouseover="viewsdropdown.isopen=true" ng-mouseout="viewsdropdown.isopen=false" ng-init="viewsdropdown.isopen=false">
-            <a class="btn dropdown-toggle">
-              <span>{{'versions.installOn' | translate}}</span>
-            </a>
-            <ul class="dropdown-menu" ng-show="viewsdropdown.isopen">
-              <li ng-repeat="cluster in dropDownClusters">
-                <a href="javascript:void(null)" ng-click="goToCluster()">
-                    <span>{{cluster.Clusters.cluster_name}}</span>
-                </a>
-              </li>
-            </ul>
-          </div>
+
+  <accordion close-others="false" class="col-sm-2 left-menu-all-repos">
+    <accordion-group ng-repeat="stackVersion in stackVersions" is-open="stackVersion.isOpened">
+      <accordion-heading>
+        <div class="row stack-version-title">
+          <i class="glyphicon glyphicon-chevron-right" ng-class="{'opened': stackVersion.isOpened}"></i>
+          {{stackVersion.name}}
         </div>
-      </td>
-    </tr>
-    </tbody>
-  </table>
-  <div class="alert alert-info col-sm-12" ng-show="!repos.length">
-    {{'common.alerts.nothingToDisplay' | translate: '{term: getConstant("version")}'}}
-  </div>
-  <div class="col-sm-12 table-bar">
-    <div class="pull-left filtered-info">
-      <span>{{'common.filterInfo' | translate: '{showed: tableInfo.showed, total: tableInfo.total, term: getConstant("versions")}'}}</span>
-      <span ng-show="isNotEmptyFilter">- <a href ng-click="clearFilters()">{{'common.controls.clearFilters' | translate}}</a></span>
+      </accordion-heading>
+      <table class="table repos-table">
+        <tbody>
+        <tr ng-repeat="repo in stackVersion.repos">
+          <td class="repos-td">
+            <a href="#/stackVersions/{{repo.stack_name}}/{{repo.repository_version}}/edit">{{repo.repository_version}}</a>
+          </td>
+        </tr>
+        </tbody>
+        </table>
+    </accordion-group>
+    <div class="alert alert-info" ng-show="stackVersions && !stackVersions.length">
+      {{'versions.contents.empty' | translate}}
     </div>
-    <div class="pull-right left-margin">
-      <pagination class="paginator"
-                  total-items="pagination.totalRepos"
-                  max-size="pagination.maxVisiblePages"
-                  items-per-page="pagination.itemsPerPage"
-                  ng-model="pagination.currentPage"
-                  ng-change="pageChanged()"
-        ></pagination>
-    </div>
-    <div class="pull-right">
-      <select class="form-control"
-              ng-model="pagination.itemsPerPage"
-              ng-options="item for item in [10, 25, 50, 100]"
-              ng-change="resetPagination()"
-        ></select>
+  </accordion>
+
+  <form class="col-sm-10 form-horizontal" role="form"novalidate>
+    <div class="alert alert-info no-version-alert">
+      {{'versions.noVersions' | translate}}
     </div>
-  </div>
+  </form>
+
 </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0f9da428/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
index 61d2d8d..1fad9a3 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
@@ -23,14 +23,15 @@
     <li class="active" ng-if="createController">{{'versions.register' | translate}}</li>
   </ol>
 
-  <div class="pull-right top-margin-4" ng-switch="deleteEnabled"  ng-if="editController">
-    <button ng-switch-when="false" class="btn disabled btn-default" tooltip="Cannot delete version already installed.">{{'versions.deregister' | translate}}</button>
-    <button ng-switch-when="true" class="btn btn-danger" ng-click="delete()">{{'versions.deregister' | translate}}</button>
+  <div class="pull-right top-margin-4">
+    <a href="#/stackVersions/create" class="btn btn-primary">
+      <span class="glyphicon glyphicon-plus"></span>
+        {{'versions.register' | translate}}
+    </a>
   </div>
 </div>
 <hr>
 
-
 <div id="upload-definition-file-panel" ng-if="createController">
   <div class="clearfix register-version-options">
     <div class="col-sm-5 option-radio-button">
@@ -60,10 +61,37 @@
   </div>
 </div>
 
-<form class="form-horizontal register-version-form" role="form" name="repoRegForm" novalidate>
-  <div class="panel panel-default">
+<accordion close-others="false" class="col-sm-2 left-menu-all-repos" ng-if="editController">
+  <accordion-group ng-repeat="stackVersion in stackVersions" is-open="stackVersion.isOpened">
+    <accordion-heading>
+      <div class="row stack-version-title">
+        <i class="glyphicon glyphicon-chevron-right" ng-class="{'opened': stackVersion.isOpened}"></i>
+          {{stackVersion.name}}
+      </div>
+    </accordion-heading>
+    <table class="table repos-table">
+      <tbody>
+      <tr ng-repeat="repo in stackVersion.repos">
+        <td class="repos-td" ng-class="{'active': repo.isActive}">
+            <a href="#/stackVersions/{{repo.stack_name}}/{{repo.repository_version}}/edit">{{repo.repository_version}}</a>
+        </td>
+      </tr>
+      </tbody>
+    </table>
+  </accordion-group>
+  <div class="alert alert-info" ng-show="stackVersions && !stackVersions.length">
+    {{'versions.contents.empty' | translate}}
+  </div>
+</accordion>
+
+<form ng-class="{'col-sm-10': editController}" class="form-horizontal register-version-form" role="form" name="repoRegForm" novalidate>
+  <div class="panel panel-default details-panel">
     <div class="panel-heading">
       <h3 class="panel-title">{{'common.details' | translate}}</h3>
+        <div class="pull-right deregister-button" ng-switch="deleteEnabled"  ng-if="editController">
+            <button ng-switch-when="false" class="btn disabled btn-default" tooltip="Cannot delete version already installed.">{{'versions.deregister' | translate}}</button>
+            <button ng-switch-when="true" class="btn btn-danger" ng-click="delete()">{{'versions.deregister' | translate}}</button>
+        </div>
     </div>
     <div class="panel-body">
       <div class="clearfix">
@@ -85,7 +113,7 @@
       </div>
     </div>
   </div>
-  <div class="panel panel-default">
+  <div class="panel panel-default contents-panel">
     <div class="panel-heading">
       <h3 class="panel-title">{{'versions.contents.title' | translate}}</h3>
     </div>
@@ -97,7 +125,7 @@
       </div>
     </div>
   </div>
-  <div class="panel panel-default">
+  <div class="panel panel-default repos-panel">
     <div class="panel-heading">
       <h3 class="panel-title">{{'versions.repos' | translate}}</h3>
     </div>
@@ -114,10 +142,8 @@
         <div ng-if="os.selected==true">
           <div class="clearfix border-bottom bottom-margin">
             <!-- show selected os in list table-->
-            <div class="col-sm-2">
-              <div class="">
-                <label>{{os.OperatingSystems.os_type}}</label>
-              </div>
+            <div class="col-sm-2 os-type-label">
+              <label>{{os.OperatingSystems.os_type}}</label>
             </div>
             <div class="col-sm-9">
               <div class="form-group {{repository.Repositories.repo_name}}" ng-class="{'has-error': repository.hasError }" ng-repeat="repository in os.repositories">

http://git-wip-us.apache.org/repos/asf/ambari/blob/0f9da428/ambari-admin/src/main/resources/ui/admin-web/test/unit/controllers/stackVersions/StackversionsListCtrl_test.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/test/unit/controllers/stackVersions/StackversionsListCtrl_test.js b/ambari-admin/src/main/resources/ui/admin-web/test/unit/controllers/stackVersions/StackversionsListCtrl_test.js
index 1e47c20..6f168db 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/test/unit/controllers/stackVersions/StackversionsListCtrl_test.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/test/unit/controllers/stackVersions/StackversionsListCtrl_test.js
@@ -40,157 +40,5 @@ describe('#Cluster', function () {
 
     });
 
-    describe('#fillClusters()', function () {
-
-      var clusters = [
-          {
-            Clusters: {
-              cluster_name: 'c0'
-            }
-          }
-        ],
-        cases = [
-          {
-            prev: null,
-            current: {
-              label: 'All',
-              value: ''
-            },
-            title: 'no cluster selected before'
-          },
-          {
-            prev: {
-              label: 'c0',
-              value: 'c0'
-            },
-            current: {
-              label: 'c0',
-              value: 'c0'
-            },
-            title: 'cluster was selected before'
-          }
-        ];
-
-      angular.forEach(cases, function (item) {
-        it(item.title, function() {
-          scope.filter.cluster.current = item.prev;
-          scope.fillClusters(clusters);
-          expect(scope.dropDownClusters).toEqual(clusters);
-          expect(scope.filter.cluster.current).toEqual(item.current);
-        });
-      });
-
-    });
-
-    describe('#isNotEmptyFilter', function () {
-
-      var cases = [
-        {
-          filter: {
-            version: '',
-            cluster: {
-              current: null
-            }
-          },
-          isNotEmptyFilter: false,
-          title: 'no filters'
-        },
-        {
-          filter: {
-            version: '',
-            cluster: {
-              current: {
-                value: ''
-              }
-            }
-          },
-          isNotEmptyFilter: false,
-          title: 'empty filters'
-        },
-        {
-          filter: {
-            version: 'a',
-            cluster: {
-              current: {
-                value: ''
-              }
-            }
-          },
-          isNotEmptyFilter: true,
-          title: 'version filter'
-        },
-        {
-          filter: {
-            version: '0',
-            cluster: {
-              current: {
-                value: ''
-              }
-            }
-          },
-          isNotEmptyFilter: true,
-          title: 'version filter with "0" as string'
-        },
-        {
-          filter: {
-            version: '',
-            cluster: {
-              current: {
-                value: 'a'
-              }
-            }
-          },
-          isNotEmptyFilter: true,
-          title: 'cluster filter'
-        },
-        {
-          filter: {
-            version: '',
-            cluster: {
-              current: {
-                value: '0'
-              }
-            }
-          },
-          isNotEmptyFilter: true,
-          title: 'cluster filter with "0" as string'
-        },
-        {
-          filter: {
-            version: 'a',
-            cluster: {
-              current: {
-                value: 'a'
-              }
-            }
-          },
-          isNotEmptyFilter: true,
-          title: 'both filters'
-        },
-        {
-          filter: {
-            version: '0',
-            cluster: {
-              current: {
-                value: '0'
-              }
-            }
-          },
-          isNotEmptyFilter: true,
-          title: 'both filters with "0" as string'
-        }
-      ];
-
-      cases.forEach(function (item) {
-        it(item.title, function () {
-          $httpBackend.expectGET(/\/api\/v1\/clusters\?_=\d+/).respond(200);
-          scope.filter = item.filter;
-          scope.$digest();
-          expect(scope.isNotEmptyFilter).toEqual(item.isNotEmptyFilter);
-        });
-      });
-
-    });
-
   });
 });


[32/50] [abbrv] ambari git commit: AMBARI-15097. Remove targeted orchestration for patches (ncole)

Posted by nc...@apache.org.
AMBARI-15097. Remove targeted orchestration for patches (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9873e696
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9873e696
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9873e696

Branch: refs/heads/trunk
Commit: 9873e69624ca429446312dfdb736f8b3d3d572cf
Parents: 4b5a250
Author: Nate Cole <nc...@hortonworks.com>
Authored: Thu Feb 18 14:36:26 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Fri Feb 19 08:31:44 2016 -0500

----------------------------------------------------------------------
 .../ambari/annotations/ExperimentalFeature.java |  7 +-
 .../ClusterStackVersionResourceProvider.java    | 27 +-------
 .../internal/UpgradeResourceProvider.java       | 32 +--------
 .../ambari/server/state/UpgradeContext.java     |  6 ++
 .../state/repository/VersionDefinitionXml.java  | 43 ++++++++----
 .../src/main/resources/version_definition.xsd   |  2 +-
 ...ClusterStackVersionResourceProviderTest.java |  6 +-
 .../state/repository/VersionDefinitionTest.java | 22 ++++++
 .../version_definition_test_all_services.xml    | 73 ++++++++++++++++++++
 9 files changed, 148 insertions(+), 70 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9873e696/ambari-server/src/main/java/org/apache/ambari/annotations/ExperimentalFeature.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/annotations/ExperimentalFeature.java b/ambari-server/src/main/java/org/apache/ambari/annotations/ExperimentalFeature.java
index f29ed40..1d5ba0e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/annotations/ExperimentalFeature.java
+++ b/ambari-server/src/main/java/org/apache/ambari/annotations/ExperimentalFeature.java
@@ -35,5 +35,10 @@ public enum ExperimentalFeature {
    * The caching of current alert information in order to reduce overall load on
    * the database by preventing frequent updates and JPA entity invalidation.
    */
-  ALERT_CACHING
+  ALERT_CACHING,
+
+  /**
+   * Used for code that is targeted for patch upgrades
+   */
+  PATCH_UPGRADES
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9873e696/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
index ce5606e..07e62b3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
@@ -73,14 +73,11 @@ import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ComponentInfo;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.MaintenanceState;
-import org.apache.ambari.server.state.RepositoryType;
 import org.apache.ambari.server.state.RepositoryVersionState;
 import org.apache.ambari.server.state.ServiceComponentHost;
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.ServiceOsSpecific;
 import org.apache.ambari.server.state.StackId;
-import org.apache.ambari.server.state.repository.AvailableService;
-import org.apache.ambari.server.state.repository.VersionDefinitionXml;
 import org.apache.ambari.server.utils.StageUtils;
 import org.apache.commons.lang.StringUtils;
 
@@ -418,27 +415,9 @@ public class ClusterStackVersionResourceProvider extends AbstractControllerResou
 
       // determine services for the repo
       Set<String> serviceNames = new HashSet<>();
-      if (RepositoryType.STANDARD != repoVersionEnt.getType() && null != repoVersionEnt.getVersionXsd()) {
-        VersionDefinitionXml xml = null;
-        try {
-         xml = repoVersionEnt.getRepositoryXml();
-
-         Collection<AvailableService> available = xml.getAvailableServices(
-             ami.getStack(stackId.getStackName(), stackId.getStackVersion()));
-
-         // check if the service is part of the cluster
-         for (AvailableService as : available) {
-           if (cluster.getServices().containsKey(as.getName())) {
-             serviceNames.add(as.getName());
-           }
-         }
-
-        } catch (Exception e) {
-          String msg = String.format("Could not load repo xml for %s", repoVersionEnt.getDisplayName());
-          LOG.error(msg, e);
-          throw new SystemException (msg);
-        }
-      }
+
+      // !!! TODO for patch upgrades, we need to limit the serviceNames to those
+      // that are detailed for the repository
 
       // Populate with commands for host
       for (int i = 0; i < maxTasks && hostIterator.hasNext(); i++) {
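
For readers tracking the TODO above: the deleted block limited the install to cluster services named in the repository's version definition. A condensed sketch of that removed logic, using only the identifiers visible in the deleted lines (the wrapper class, helper name, and parameter list are illustrative, not Ambari API):

import java.util.HashSet;
import java.util.Set;

import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.RepositoryType;
import org.apache.ambari.server.state.StackInfo;
import org.apache.ambari.server.state.repository.AvailableService;
import org.apache.ambari.server.state.repository.VersionDefinitionXml;

// Illustrative condensation of the block removed above; not new Ambari code.
public class RepoServiceNames {

  static Set<String> serviceNamesForRepo(RepositoryVersionEntity repoVersionEnt,
      Cluster cluster, StackInfo stackInfo) throws Exception {

    Set<String> serviceNames = new HashSet<>();

    // Only non-STANDARD repos with a version definition restrict the services.
    if (RepositoryType.STANDARD != repoVersionEnt.getType()
        && null != repoVersionEnt.getVersionXsd()) {

      VersionDefinitionXml xml = repoVersionEnt.getRepositoryXml();

      // Keep only the defined services that are actually part of the cluster.
      for (AvailableService as : xml.getAvailableServices(stackInfo)) {
        if (cluster.getServices().containsKey(as.getName())) {
          serviceNames.add(as.getName());
        }
      }
    }

    return serviceNames;
  }
}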

http://git-wip-us.apache.org/repos/asf/ambari/blob/9873e696/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
index 33b496f..860ba88 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
@@ -24,7 +24,6 @@ import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.VERSION;
 import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collection;
 import java.util.Collections;
 import java.util.EnumSet;
 import java.util.HashMap;
@@ -85,15 +84,12 @@ import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.DesiredConfig;
-import org.apache.ambari.server.state.RepositoryType;
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.UpgradeContext;
 import org.apache.ambari.server.state.UpgradeHelper;
 import org.apache.ambari.server.state.UpgradeHelper.UpgradeGroupHolder;
-import org.apache.ambari.server.state.repository.AvailableService;
-import org.apache.ambari.server.state.repository.VersionDefinitionXml;
 import org.apache.ambari.server.state.stack.ConfigUpgradePack;
 import org.apache.ambari.server.state.stack.PrereqCheckStatus;
 import org.apache.ambari.server.state.stack.UpgradePack;
@@ -718,31 +714,9 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
         RepositoryVersionEntity targetRepositoryVersion = s_repoVersionDAO.findByStackNameAndVersion(
             sourceStackId.getStackName(), version);
 
-        EnumSet<RepositoryType> serviceAware = EnumSet.of(RepositoryType.PATCH, RepositoryType.SERVICE);
-        if (serviceAware.contains(targetRepositoryVersion.getType())) {
-
-          VersionDefinitionXml xml = null;
-          StackInfo stackInfo = s_metaProvider.get().getStack(sourceStackId.getStackName(),
-              sourceStackId.getStackVersion());
-
-          try {
-            xml = targetRepositoryVersion.getRepositoryXml();
-          } catch (Exception e) {
-            throw new AmbariException(String.format("Could not load repository definition for version %s", version));
-          }
-
-          if (null != xml) {
-            Collection<AvailableService> services = xml.getAvailableServices(stackInfo);
-
-            for (AvailableService available : services) {
-              supportedServices.add(available.getName());
-            }
-
-            if (!services.isEmpty()) {
-              scope = UpgradeScope.PARTIAL;
-            }
-          }
-        }
+        // !!! TODO check the repo_version for patch-ness and restrict the context
+        // to those services that require it.  Consult the version definition and add the
+        // service names to supportedServices
 
         targetStackId = targetRepositoryVersion.getStackId();
         break;

http://git-wip-us.apache.org/repos/asf/ambari/blob/9873e696/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
index 49b2b45..2f616e7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
@@ -24,6 +24,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.ambari.annotations.Experimental;
+import org.apache.ambari.annotations.ExperimentalFeature;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.stack.MasterHostResolver;
 import org.apache.ambari.server.state.stack.upgrade.Direction;
@@ -364,6 +366,7 @@ public class UpgradeContext {
    *
    * @param services  the set of specific services
    */
+  @Experimental(feature=ExperimentalFeature.PATCH_UPGRADES)
   public void setSupportedServices(Set<String> services) {
     m_supported = services;
   }
@@ -374,6 +377,7 @@ public class UpgradeContext {
    * @param serviceName the service name to check.
    * @return {@code true} when the service is supported
    */
+  @Experimental(feature=ExperimentalFeature.PATCH_UPGRADES)
   public boolean isServiceSupported(String serviceName) {
     if (m_supported.isEmpty() || m_supported.contains(serviceName)) {
       return true;
@@ -382,10 +386,12 @@ public class UpgradeContext {
     return false;
   }
 
+  @Experimental(feature=ExperimentalFeature.PATCH_UPGRADES)
   public void setScope(UpgradeScope scope) {
     m_scope = scope;
   }
 
+  @Experimental(feature=ExperimentalFeature.PATCH_UPGRADES)
   public boolean isScoped(UpgradeScope scope) {
     return m_scope.isScoped(scope);
   }
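
The experimental methods above are the knobs the TODOs in this commit refer back to. A minimal wiring sketch, assuming a hypothetical helper class (only UpgradeContext and the two methods shown in this hunk are real):

import java.util.Set;

import org.apache.ambari.server.state.UpgradeContext;

// Hypothetical helper; illustrates the contracts documented above.
public class PatchUpgradeScoping {

  /**
   * Restricts the upgrade to the services named by a patch repository.
   * Passing an empty set keeps the default "all services supported" behavior.
   */
  public static void restrictToPatchServices(UpgradeContext context, Set<String> patchServices) {
    context.setSupportedServices(patchServices);
  }

  /** True when upgrade items should be generated for the given service. */
  public static boolean includeService(UpgradeContext context, String serviceName) {
    return context.isServiceSupported(serviceName);
  }
}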

http://git-wip-us.apache.org/repos/asf/ambari/blob/9873e696/ambari-server/src/main/java/org/apache/ambari/server/state/repository/VersionDefinitionXml.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/repository/VersionDefinitionXml.java b/ambari-server/src/main/java/org/apache/ambari/server/state/repository/VersionDefinitionXml.java
index a610bd0..93ac767 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/repository/VersionDefinitionXml.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/repository/VersionDefinitionXml.java
@@ -100,27 +100,21 @@ public class VersionDefinitionXml {
    * @return a collection of AvailableServices used for web service consumption
    */
   public Collection<AvailableService> getAvailableServices(StackInfo stack) {
-    if (availableServices.isEmpty()) {
-      return Collections.emptyList();
-    }
-
     if (null == availableMap) {
       Map<String, ManifestService> manifests = buildManifest();
       availableMap = new HashMap<>();
 
-      for (AvailableServiceReference ref : availableServices) {
-        ManifestService ms = manifests.get(ref.serviceIdReference);
-        ServiceInfo service = stack.getService(ms.serviceName);
+      if (availableServices.isEmpty()) {
+        for (ManifestService ms : manifests.values()) {
+          addToAvailable(ms, stack, Collections.<String>emptySet());
+        }
 
-        if (!availableMap.containsKey(ms.serviceName)) {
-          String display = (null == service) ? ms.serviceName: service.getDisplayName();
+      } else {
+        for (AvailableServiceReference ref : availableServices) {
+          ManifestService ms = manifests.get(ref.serviceIdReference);
 
-          availableMap.put(ms.serviceName, new AvailableService(ms.serviceName, display));
+          addToAvailable(ms, stack, ref.components);
         }
-
-        AvailableService as = availableMap.get(ms.serviceName);
-        as.getVersions().add(new AvailableVersion(ms.version, ms.versionId,
-            buildComponents(service, ref.components)));
       }
     }
 
@@ -128,6 +122,27 @@ public class VersionDefinitionXml {
   }
 
   /**
+   * Helper method to use a {@link ManifestService} to generate the available services structure
+   * @param ms          the ManifestService instance
+   * @param stack       the stack object
+   * @param components  the set of components for the service
+   */
+  private void addToAvailable(ManifestService ms, StackInfo stack, Set<String> components) {
+    ServiceInfo service = stack.getService(ms.serviceName);
+
+    if (!availableMap.containsKey(ms.serviceName)) {
+      String display = (null == service) ? ms.serviceName: service.getDisplayName();
+
+      availableMap.put(ms.serviceName, new AvailableService(ms.serviceName, display));
+    }
+
+    AvailableService as = availableMap.get(ms.serviceName);
+    as.getVersions().add(new AvailableVersion(ms.version, ms.versionId,
+        buildComponents(service, components)));
+  }
+
+
+  /**
    * @return the list of manifest services to a map for easier access.
    */
   private Map<String, ManifestService> buildManifest() {
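
Behaviorally, the rewrite above means an empty <available-services/> element now exposes every service from the manifest. A short sketch of the call path, mirroring the VersionDefinitionTest.testAllServices() case added later in this commit (the fixture path comes from that test; a plain StackInfo instance means display names fall back to service names):

import java.io.File;

import org.apache.ambari.server.state.StackInfo;
import org.apache.ambari.server.state.repository.AvailableService;
import org.apache.ambari.server.state.repository.VersionDefinitionXml;

// Sketch only; see VersionDefinitionTest.testAllServices() further down.
public class ListAvailableServices {

  public static void main(String[] args) throws Exception {
    File f = new File("src/test/resources/version_definition_test_all_services.xml");
    VersionDefinitionXml xml = VersionDefinitionXml.load(f.toURI().toURL());

    // With an empty <available-services/>, all manifest services are reported;
    // duplicate manifest entries for one service collapse into a single entry.
    for (AvailableService as : xml.getAvailableServices(new StackInfo())) {
      System.out.println(as.getName());
    }
  }
}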

http://git-wip-us.apache.org/repos/asf/ambari/blob/9873e696/ambari-server/src/main/resources/version_definition.xsd
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/version_definition.xsd b/ambari-server/src/main/resources/version_definition.xsd
index 42035de..de0efd8 100644
--- a/ambari-server/src/main/resources/version_definition.xsd
+++ b/ambari-server/src/main/resources/version_definition.xsd
@@ -96,7 +96,7 @@
       </xs:documentation>
     </xs:annotation>
     <xs:sequence>
-      <xs:element name="service" maxOccurs="unbounded">
+      <xs:element name="service" minOccurs="0" maxOccurs="unbounded">
         <xs:complexType>
           <xs:sequence>
             <xs:element name="component" minOccurs="0" maxOccurs="unbounded" />

http://git-wip-us.apache.org/repos/asf/ambari/blob/9873e696/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
index c6d0c57..69ed9d7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
@@ -43,6 +43,8 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
 
+import org.apache.ambari.annotations.Experimental;
+import org.apache.ambari.annotations.ExperimentalFeature;
 import org.apache.ambari.server.Role;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
@@ -102,6 +104,7 @@ import org.easymock.IAnswer;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
+import org.junit.Ignore;
 import org.junit.Test;
 
 import com.google.inject.AbstractModule;
@@ -355,7 +358,8 @@ public class ClusterStackVersionResourceProviderTest {
 
   }
 
-  @Test
+  @Experimental(feature=ExperimentalFeature.PATCH_UPGRADES)
+  @Ignore
   public void testCreateResourcesForPatch() throws Exception {
     Resource.Type type = Resource.Type.ClusterStackVersion;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/9873e696/ambari-server/src/test/java/org/apache/ambari/server/state/repository/VersionDefinitionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/repository/VersionDefinitionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/repository/VersionDefinitionTest.java
index 4ba5967..f2939c7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/repository/VersionDefinitionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/repository/VersionDefinitionTest.java
@@ -23,6 +23,8 @@ import static org.junit.Assert.assertNotNull;
 import java.io.File;
 
 import org.apache.ambari.server.state.RepositoryType;
+import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.StackInfo;
 import org.apache.commons.io.FileUtils;
 import org.junit.Test;
 
@@ -80,4 +82,24 @@ public class VersionDefinitionTest {
     assertEquals("HDP", xml.repositoryInfo.getOses().get(0).getRepos().get(0).getRepoName());
   }
 
+  @Test
+  public void testAllServices() throws Exception {
+
+    File f = new File("src/test/resources/version_definition_test_all_services.xml");
+
+    VersionDefinitionXml xml = VersionDefinitionXml.load(f.toURI().toURL());
+
+    StackInfo stack = new StackInfo() {
+      @Override
+      public ServiceInfo getService(String name) {
+        return null;
+      }
+    };
+
+    // the file does not define available services, so every manifest service (deduplicated by name) is available
+    assertEquals(4, xml.manifestServices.size());
+    assertEquals(3, xml.getAvailableServices(stack).size());
+  }
+
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9873e696/ambari-server/src/test/resources/version_definition_test_all_services.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/version_definition_test_all_services.xml b/ambari-server/src/test/resources/version_definition_test_all_services.xml
new file mode 100644
index 0000000..7557f7a
--- /dev/null
+++ b/ambari-server/src/test/resources/version_definition_test_all_services.xml
@@ -0,0 +1,73 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<repository-version xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:noNamespaceSchemaLocation="version_definition.xsd">
+  
+  <release>
+    <type>PATCH</type>
+    <stack-id>HDP-2.3</stack-id>
+    <version>2.3.4.1</version>
+    <build>1234</build>
+    <compatible-with>2.3.4.[1-9]</compatible-with>
+    <release-notes>http://docs.hortonworks.com/HDPDocuments/HDP2/HDP-2.3.4/</release-notes>
+  </release>
+  
+  <manifest>
+    <service id="HDFS-271" name="HDFS" version="2.7.1" version-id="10" />
+    <service id="HIVE-110" name="HIVE" version="1.1.0" />
+    <service id="HIVE-200" name="HIVE" version="2.0.0" />
+    <service id="HBASE-899" name="HBASE" version="8.9.9" />
+  </manifest>
+  
+  <available-services />
+  
+  <repository-info>
+    <os family="redhat6">
+      <repo>
+        <baseurl>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.3.0.0</baseurl>
+        <repoid>HDP-2.3</repoid>
+        <reponame>HDP</reponame>
+      </repo>
+      <repo>
+        <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6</baseurl>
+        <repoid>HDP-UTILS-1.1.0.20</repoid>
+        <reponame>HDP-UTILS</reponame>
+      </repo>
+    </os>
+    <os family="redhat7">
+      <repo>
+        <baseurl>http://public-repo-1.hortonworks.com/HDP/centos7/2.x/updates/2.3.0.0</baseurl>
+        <repoid>HDP-2.3</repoid>
+        <reponame>HDP</reponame>
+      </repo>
+      <repo>
+        <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos7</baseurl>
+        <repoid>HDP-UTILS-1.1.0.20</repoid>
+        <reponame>HDP-UTILS</reponame>
+      </repo>
+    </os>
+
+  </repository-info>
+  
+  <upgrade>
+    <configuration type="hdfs-site">
+      <set key="foo" value="bar" />
+    </configuration>
+  </upgrade>
+</repository-version>


[14/50] [abbrv] ambari git commit: AMBARI-14795. Register Version: add ability to load new version definition file.

Posted by nc...@apache.org.
AMBARI-14795. Register Version: add ability to load new version definition file.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e4d03b5b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e4d03b5b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e4d03b5b

Branch: refs/heads/trunk
Commit: e4d03b5b5879aff474abdbb3a9d571215383a6c4
Parents: c72dc41
Author: Xi Wang <xi...@apache.org>
Authored: Fri Jan 22 17:24:01 2016 -0800
Committer: Xi Wang <xi...@apache.org>
Committed: Wed Feb 3 13:55:05 2016 -0800

----------------------------------------------------------------------
 .../app/assets/data/version/version.json        |  38 ++++++-
 .../stackVersions/StackVersionsCreateCtrl.js    | 100 +++++++++++++++++++
 .../ui/admin-web/app/scripts/i18n.config.js     |   4 +
 .../ui/admin-web/app/scripts/services/Stack.js  |  38 +++++++
 .../resources/ui/admin-web/app/styles/main.css  |  16 ++-
 .../views/stackVersions/stackVersionPage.html   |  31 ++++++
 6 files changed, 225 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e4d03b5b/ambari-admin/src/main/resources/ui/admin-web/app/assets/data/version/version.json
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/assets/data/version/version.json b/ambari-admin/src/main/resources/ui/admin-web/app/assets/data/version/version.json
index 7c604f1..b5b3601 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/assets/data/version/version.json
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/assets/data/version/version.json
@@ -14,7 +14,43 @@
             "id" : 15,
             "repository_version" : "2.2.0.1-901",
             "stack_name" : "HDP",
-            "stack_version" : "2.2"
+            "stack_version" : "2.2",
+            "release": {
+              "type": "PATCH",
+              "stack_id": "HDP-2.2",
+              "version": "2.2.0.1",
+              "build": "901",
+              "compatible_with": "2.2.0.1-[1-9]",
+              "release_notes": "http://someurl"
+            },
+            "services": [
+              {
+                "name": "HDFS",
+                "versions": [
+                  {
+                    "version": "2.1.1",
+                    "version_id": "10",
+                    "components": [ "NAMENODE"]
+                  }
+                ]
+              },
+              {
+                "name": "HIVE",
+                "versions": [
+                  {
+                    "version": "1.2.1"
+                  }
+                ]
+              },
+              {
+                "name": "ZOOKEEPER",
+                "versions": [
+                  {
+                    "version": "3.4.5"
+                  }
+                ]
+              }
+            ]
           },
           "operating_systems" : [
             {

http://git-wip-us.apache.org/repos/asf/ambari/blob/e4d03b5b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
index a817d88..40aa103 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
@@ -151,4 +151,104 @@ angular.module('ambariAdminConsole')
     $location.path('/stackVersions');
   };
 
+  // two options to upload version definition file
+  $scope.option1 = {
+    index: 1,
+    displayName: 'Upload Version Definition File',
+    url: 'files://',
+    selected: true,
+    hasError: true
+  };
+  $scope.option2 = {
+    index: 2,
+    displayName: 'Version Definition File URL',
+    url: 'https://',
+    selected: false,
+    hasError: false
+  };
+  $scope.selectedOption = 1;
+
+  /**
+   * User can select ONLY one option to upload version definition file
+   */
+  $scope.toggleOptionSelect = function () {
+    $scope.option1.selected = $scope.selectedOption == $scope.option1.index;
+    $scope.option2.selected = $scope.selectedOption == $scope.option2.index;
+    $scope.option1.hasError = false;
+    $scope.option2.hasError = false;
+  };
+  $scope.clearOptionsError = function () {
+    $scope.option1.hasError = false;
+    $scope.option2.hasError = false;
+  };
+  $scope.readInfoButtonDisabled = function () {
+    return $scope.option1.selected ? !$scope.option1.url : !$scope.option2.url;
+  };
+
+  $scope.onFileSelect = function(){
+    return {
+      link: function($scope,el){
+        el.bind("change", function(e){
+          $scope.file = (e.srcElement || e.target).files[0];
+          $scope.getFile();
+        })
+      }
+    }
+  };
+
+//  $scope.uploadFile = function(){
+//    var file = $scope.myFile;
+//    console.log('file is ' );
+//    console.dir(file);
+//    var uploadUrl = "/fileUpload";
+//    fileUpload.uploadFileToUrl(file, uploadUrl);
+//  };
+
+  /**
+   * Load selected file to current page content
+   */
+  $scope.readVersionInfo = function(){
+    if ($scope.option2.selected) {
+      var url = $scope.option2.url;
+    }
+    /// POST url first then get the version definition info
+    return Stack.getLatestRepo('HDP').then(function (response) {
+      $scope.id = response.id;
+      $scope.isPatch = response.type == 'PATCH';
+      $scope.stackNameVersion = response.stackNameVersion;
+      $scope.displayName = response.displayName;
+      $scope.version = response.version;
+      $scope.actualVersion = response.actualVersion;
+      $scope.services = response.services;
+      //save default values of repos to check if they were changed
+      $scope.defaulfOSRepos = {};
+      response.updateObj.operating_systems.forEach(function(os) {
+        $scope.defaulfOSRepos[os.OperatingSystems.os_type] = {
+          defaultBaseUrl: os.repositories[0].Repositories.base_url,
+          defaultUtilsUrl: os.repositories[1].Repositories.base_url
+        };
+      });
+      $scope.repoVersionFullName = response.repoVersionFullName;
+      angular.forEach(response.osList, function (os) {
+        os.selected = true;
+      });
+      $scope.selectedOS = response.osList.length;
+      $scope.osList = response.osList;
+      // if the user reached here from a UI click, the repo status should be cached;
+      // otherwise re-fetch the repo status from the cluster end point.
+      $scope.repoStatus = Cluster.repoStatusCache[$scope.id];
+      if (!$scope.repoStatus) {
+        $scope.fetchClusters()
+          .then(function () {
+            return $scope.fetchRepoClusterStatus();
+          })
+          .then(function () {
+            $scope.deleteEnabled = $scope.isDeletable();
+          });
+      } else {
+        $scope.deleteEnabled = $scope.isDeletable();
+      }
+      $scope.addMissingOSList();
+    });
+  };
 }]);

http://git-wip-us.apache.org/repos/asf/ambari/blob/e4d03b5b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
index 086bc13..21622b5 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
@@ -293,6 +293,10 @@ angular.module('ambariAdminConsole')
       'current': 'Current',
       'inUse': 'In Use',
       'installed': 'Installed',
+      'uploadFile': 'Upload Version Definition File',
+      'enterURL': 'Version Definition File URL',
+      'readInfo': 'Read Version Info',
+      'browse': 'Browse',
       'installOn': 'Install on...',
       'register': 'Register Version',
       'deregister': 'Deregister Version',

http://git-wip-us.apache.org/repos/asf/ambari/blob/e4d03b5b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
index ffed3cc..a28943c 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
@@ -173,6 +173,44 @@ angular.module('ambariAdminConsole')
       return deferred.promise;
     },
 
+    getLatestRepo: function (stack_name) {
+      var url = Settings.baseUrl + '/stacks/' + stack_name + '/versions?' +
+        'fields=repository_versions/operating_systems/repositories/*' +
+        ',repository_versions/RepositoryVersions/*';
+      var deferred = $q.defer();
+      $http.get(url, {mock: 'version/version.json'})
+        .success(function (data) {
+          data = data.items[0];
+          var response = {
+            id : data.repository_versions[0].RepositoryVersions.id,
+            stackVersion : data.Versions.stack_version,
+            stackName: data.Versions.stack_name,
+            type: data.repository_versions[0].RepositoryVersions.release.type,
+            stackNameVersion: data.Versions.stack_name + '-' + data.Versions.stack_version, /// HDP-2.3
+            actualVersion: data.repository_versions[0].RepositoryVersions.repository_version, /// 2.3.4.0-3846
+            version: data.repository_versions[0].RepositoryVersions.release.version, /// 2.3.4.0
+            displayName: data.Versions.stack_name + '-' + data.repository_versions[0].RepositoryVersions.release.version, //HDP-2.3.4.0
+            repoVersionFullName : data.Versions.stack_name + '-' + data.repository_versions[0].RepositoryVersions.repository_version,
+            osList: data.repository_versions[0].operating_systems,
+            updateObj: data.repository_versions[0]
+          };
+          var services = [];
+          angular.forEach(data.repository_versions[0].RepositoryVersions.services, function (service) {
+            services.push({
+              name: service.name,
+              version: service.versions[0].version,
+              components: service.versions[0].components
+            });
+          });
+          response.services = services;
+          deferred.resolve(response);
+        })
+        .error(function (data) {
+          deferred.reject(data);
+        });
+      return deferred.promise;
+    },
+
     updateRepo: function (stackName, stackVersion, id, payload) {
       var url = Settings.baseUrl + '/stacks/' + stackName + '/versions/' + stackVersion + '/repository_versions/' + id;
       var deferred = $q.defer();

http://git-wip-us.apache.org/repos/asf/ambari/blob/e4d03b5b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
index cc57fa3..de3d526 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
@@ -1383,4 +1383,18 @@ thead.view-permission-header > tr > th {
 
 .test-ldap-icon.ng-hide-add-active, .test-ldap-icon.ng-hide-remove {
   display: inline-block!important;
-}
\ No newline at end of file
+}
+
+.register-version-options .read-info-button {
+  margin-bottom: 10px;
+}
+
+.register-version-options .option-radio-button {
+  padding-top: 5px;
+}
+
+.register-version-options .choose-file-input {
+  padding-top: 6px;
+  padding-bottom: 20px;
+}
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/e4d03b5b/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
index 9b7e032..e23aecb 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
@@ -29,6 +29,37 @@
   </div>
 </div>
 <hr>
+
+
+
+<div class="clearfix register-version-options">
+  <div class="col-sm-5 option-radio-button">
+    <label class="option-label">
+      <input type="radio" ng-model="selectedOption" value="{{option1.index}}" ng-change="toggleOptionSelect()"> {{'versions.uploadFile' | translate}}
+    </label>
+  </div>
+  <div class="col-sm-7">
+    <input type="file" class="choose-file-input" ng-file-select="onFileSelect($files)"/>
+  </div>
+</div>
+<div class="clearfix register-version-options border-bottom bottom-margin">
+  <div class="col-sm-5 option-radio-button">
+    <label class="option-label">
+      <input type="radio" ng-model="selectedOption" value="{{option2.index}}" ng-change="toggleOptionSelect()"> {{'versions.enterURL' | translate}}
+    </label>
+  </div>
+    <div class="col-sm-7">
+      <div class="form-group {{option2.name}}" ng-class="{'has-error': option2.url.hasError }">
+        <div class=""><input type="text" class="form-control" ng-model="option2.url" ng-change="clearOptionsError()" ng-disabled="!option2.selected"></div>
+      </div>
+    </div>
+    <div class="col-sm-12 read-info-button">
+    <button class="btn btn-primary pull-right" ng-click="readVersionInfo()"
+        ng-disabled="readInfoButtonDisabled()">{{'versions.readInfo' | translate}}</button>
+    </div>
+</div>
+
+
 <form class="form-horizontal register-version-form" role="form" name="repoRegForm" novalidate>
   <div class="panel panel-default" ng-if="createController">
     <div class="panel-heading">


[30/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/418745d1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/418745d1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/418745d1

Branch: refs/heads/trunk
Commit: 418745d1297e41d08ba522544ffb17d3bbb22f77
Parents: e4d1475 f927149
Author: Nate Cole <nc...@hortonworks.com>
Authored: Thu Feb 18 08:33:10 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Thu Feb 18 08:33:10 2016 -0500

----------------------------------------------------------------------
 .../libraries/providers/hdfs_resource.py        |  63 +-
 .../libraries/resources/hdfs_resource.py        |  12 +
 ambari-funtest/pom.xml                          |  13 +-
 ambari-funtest/src/main/assemblies/empty.xml    |  22 +
 ambari-funtest/src/main/assemblies/funtest.xml  |  79 --
 ambari-metrics/ambari-metrics-grafana/README.md |  22 +-
 .../ambari-metrics/datasource.js                |  49 +-
 .../screenshots/21-multi-templating.png         | Bin 0 -> 92034 bytes
 .../conf/unix/ambari-metrics-monitor            |   6 +-
 .../conf/unix/ambari-metrics-collector          |   2 +-
 ambari-server/conf/unix/log4j.properties        |   9 +
 ambari-server/conf/windows/log4j.properties     |   9 +
 ambari-server/src/main/conf/log4j.properties    |   9 +
 .../api/resources/GroupResourceDefinition.java  |   1 +
 .../resources/ResourceInstanceFactoryImpl.java  |   4 +
 .../api/services/GroupPrivilegeService.java     |  76 ++
 .../server/api/services/GroupService.java       |  11 +
 .../server/checks/CheckDatabaseHelper.java      | 155 ++--
 .../internal/DefaultProviderModule.java         |   2 +
 .../GroupPrivilegeResourceProvider.java         | 237 ++++++
 .../ambari/server/controller/spi/Resource.java  |   2 +
 .../server/upgrade/UpgradeCatalog240.java       |  76 +-
 .../main/python/ambari_server/checkDatabase.py  |   7 +-
 .../1.6.1.2.2.0/package/scripts/params.py       |   1 +
 .../0.1.0/package/scripts/params.py             |   1 +
 .../0.5.0.2.1/package/scripts/params_linux.py   |   1 +
 .../HAWQ/2.0.0/configuration/gpcheck-env.xml    |  89 --
 .../HAWQ/2.0.0/configuration/hawq-check-env.xml |  89 ++
 .../common-services/HAWQ/2.0.0/kerberos.json    | 125 ++-
 .../common-services/HAWQ/2.0.0/metainfo.xml     |  16 +-
 .../HAWQ/2.0.0/package/scripts/common.py        |  63 +-
 .../2.0.0/package/scripts/hawq_constants.py     |   2 +-
 .../HAWQ/2.0.0/package/scripts/hawqmaster.py    |   2 +-
 .../HAWQ/2.0.0/package/scripts/hawqsegment.py   |   2 +-
 .../HAWQ/2.0.0/package/scripts/hawqstandby.py   |  10 +-
 .../HAWQ/2.0.0/package/scripts/master_helper.py |   2 +-
 .../HAWQ/2.0.0/package/scripts/params.py        |  21 +-
 .../HAWQ/2.0.0/package/scripts/utils.py         |  10 +-
 .../0.96.0.2.0/package/scripts/params_linux.py  |   1 +
 .../2.1.0.2.0/package/scripts/params_linux.py   |   1 +
 .../0.12.0.2.0/package/scripts/params_linux.py  |   1 +
 .../KAFKA/0.8.1.2.2/package/scripts/params.py   |   1 +
 .../0.5.0.2.2/package/scripts/params_linux.py   |   1 +
 .../MAHOUT/1.0.0.2.3/package/scripts/params.py  |   1 +
 .../4.0.0.2.0/package/scripts/params_linux.py   |   1 +
 .../0.12.0.2.0/package/scripts/params_linux.py  |   1 +
 .../0.60.0.2.2/package/scripts/params_linux.py  |   1 +
 .../SPARK/1.2.0.2.2/package/scripts/params.py   |   1 +
 .../0.9.1.2.1/package/scripts/params_linux.py   |   1 +
 .../0.4.0.2.1/package/scripts/params_linux.py   |   1 +
 .../2.1.0.2.0/package/scripts/params_linux.py   |   1 +
 .../custom_actions/scripts/ru_execute_tasks.py  |   2 +-
 .../main/resources/scripts/Ambaripreupload.py   |  54 +-
 .../2.0.6/hooks/before-START/scripts/params.py  |   1 +
 .../HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml |   3 +-
 .../HDP/2.2/upgrades/nonrolling-upgrade-2.4.xml |   3 +-
 .../stacks/HDP/2.2/upgrades/upgrade-2.3.xml     |   2 +
 .../stacks/HDP/2.2/upgrades/upgrade-2.4.xml     |   2 +
 .../KAFKA/configuration/kafka-broker.xml        |   2 +-
 .../services/RANGER/themes/theme_version_2.json |  20 +-
 .../stacks/HDP/2.3/upgrades/config-upgrade.xml  |   5 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml |   3 +-
 .../stacks/HDP/2.3/upgrades/upgrade-2.4.xml     |   4 +
 .../stacks/HDP/2.4/upgrades/config-upgrade.xml  |   5 +-
 .../main/resources/stacks/HDP/2.5/metainfo.xml  |  25 +
 .../resources/stacks/HDP/2.5/repos/repoinfo.xml |  92 ++
 .../HDP/2.5/services/ACCUMULO/metainfo.xml      |  26 +
 .../stacks/HDP/2.5/services/ATLAS/metainfo.xml  |  26 +
 .../stacks/HDP/2.5/services/FALCON/metainfo.xml |  26 +
 .../stacks/HDP/2.5/services/FLUME/metainfo.xml  |  26 +
 .../stacks/HDP/2.5/services/HBASE/metainfo.xml  |  26 +
 .../stacks/HDP/2.5/services/HDFS/metainfo.xml   |  26 +
 .../stacks/HDP/2.5/services/HIVE/metainfo.xml   |  26 +
 .../stacks/HDP/2.5/services/KAFKA/metainfo.xml  |  26 +
 .../HDP/2.5/services/KERBEROS/metainfo.xml      |  25 +
 .../stacks/HDP/2.5/services/KNOX/metainfo.xml   |  26 +
 .../stacks/HDP/2.5/services/MAHOUT/metainfo.xml |  26 +
 .../stacks/HDP/2.5/services/OOZIE/metainfo.xml  |  26 +
 .../stacks/HDP/2.5/services/PIG/metainfo.xml    |  26 +
 .../stacks/HDP/2.5/services/RANGER/metainfo.xml |  29 +
 .../HDP/2.5/services/RANGER_KMS/metainfo.xml    |  29 +
 .../stacks/HDP/2.5/services/SLIDER/metainfo.xml |  26 +
 .../stacks/HDP/2.5/services/SPARK/metainfo.xml  |  29 +
 .../stacks/HDP/2.5/services/SQOOP/metainfo.xml  |  26 +
 .../stacks/HDP/2.5/services/STORM/metainfo.xml  |  27 +
 .../stacks/HDP/2.5/services/TEZ/metainfo.xml    |  26 +
 .../stacks/HDP/2.5/services/YARN/metainfo.xml   |  27 +
 .../HDP/2.5/services/ZOOKEEPER/metainfo.xml     |  26 +
 .../stacks/HDP/2.5/services/stack_advisor.py    |  22 +
 .../api/services/GroupPrivilegeServiceTest.java | 109 +++
 .../server/checks/CheckDatabaseHelperTest.java  |  29 +-
 .../GroupPrivilegeResourceProviderTest.java     | 362 ++++++++
 .../server/upgrade/UpgradeCatalog240Test.java   |  92 +-
 .../src/test/python/TestAmbariServer.py         |   3 +-
 ambari-server/src/test/python/TestSetupAgent.py |   2 +-
 .../custom_actions/test_ru_execute_tasks.py     |   4 +-
 .../AMBARI_METRICS/test_metrics_collector.py    |   6 +-
 .../stacks/2.0.6/HBASE/test_hbase_master.py     |  18 +-
 .../python/stacks/2.0.6/HDFS/test_namenode.py   |  54 +-
 .../stacks/2.0.6/HDFS/test_service_check.py     |   8 +-
 .../stacks/2.0.6/HIVE/test_hive_server.py       |  28 +-
 .../2.0.6/HIVE/test_hive_service_check.py       |  12 +-
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     |  32 +-
 .../stacks/2.0.6/OOZIE/test_service_check.py    |  10 +-
 .../stacks/2.0.6/PIG/test_pig_service_check.py  |  12 +-
 .../stacks/2.0.6/YARN/test_historyserver.py     |  29 +-
 .../2.0.6/YARN/test_mapreduce2_service_check.py |  12 +-
 .../stacks/2.0.6/common/test_stack_advisor.py   |  12 +-
 .../stacks/2.1/FALCON/test_falcon_server.py     |  12 +-
 .../python/stacks/2.1/TEZ/test_service_check.py |  16 +-
 .../stacks/2.1/YARN/test_apptimelineserver.py   |   2 +-
 .../stacks/2.2/PIG/test_pig_service_check.py    |  12 +-
 .../stacks/2.2/SPARK/test_job_history_server.py |  11 +-
 .../2.3/MAHOUT/test_mahout_service_check.py     |   8 +-
 .../2.3/SPARK/test_spark_thrift_server.py       |   4 +-
 .../test/python/stacks/2.3/YARN/test_ats_1_5.py |  10 +-
 .../stacks/2.3/common/services-hawq-1-host.json |   2 +-
 .../2.3/common/services-hawq-3-hosts.json       |   2 +-
 .../2.3/common/services-hawq-pxf-hdfs.json      |   4 +-
 .../services-master_ambari_colo-3-hosts.json    |   2 +-
 .../services-master_standby_colo-3-hosts.json   |   2 +-
 .../common/services-normal-hawq-3-hosts.json    |   2 +-
 .../services-standby_ambari_colo-3-hosts.json   |   2 +-
 ambari-web/app/assets/test/tests.js             |   3 +
 .../hawq/activateStandby/step3_controller.js    |   2 +-
 .../alerts/definition_configs_controller.js     |   4 +-
 ambari-web/app/controllers/main/host.js         |  18 +
 .../controllers/main/host/combo_search_box.js   |  43 +-
 .../controllers/main/service/info/configs.js    |  23 +-
 ambari-web/app/controllers/main/service/item.js |  46 +-
 ambari-web/app/controllers/wizard.js            |   3 -
 .../app/controllers/wizard/step7_controller.js  |   6 +-
 ambari-web/app/data/HDP2.3/site_properties.js   |   4 +-
 ambari-web/app/messages.js                      |   5 +-
 .../app/mixins/common/configs/configs_saver.js  |   4 -
 .../app/mixins/common/widgets/widget_mixin.js   |  70 +-
 .../mixins/wizard/assign_master_components.js   |  58 +-
 ambari-web/app/models/alerts/alert_config.js    |   4 +-
 .../configs/objects/service_config_property.js  |   2 +
 ambari-web/app/models/host_component.js         |  27 +-
 ambari-web/app/models/service.js                |   2 +-
 ambari-web/app/models/stack_service.js          |   4 +-
 .../main/host/details/host_component.hbs        |   2 +-
 ambari-web/app/utils/ajax/ajax.js               |  12 +
 ambari-web/app/utils/config.js                  | 151 ++--
 ambari-web/app/utils/helper.js                  |  14 +
 .../app/views/common/chart/linear_time.js       |  88 +-
 ambari-web/app/views/common/controls_view.js    |  75 +-
 .../main/admin/stack_upgrade/versions_view.js   |   6 +-
 .../app/views/main/host/combo_search_box.js     |  55 +-
 .../main/host/details/host_component_view.js    |   7 +-
 ambari-web/app/views/main/service/info/menu.js  |  45 +-
 .../app/views/main/service/info/summary.js      | 154 ++--
 ambari-web/app/views/main/service/item.js       |  41 +-
 .../definitions_configs_controller_test.js      |   4 +-
 .../service/reassign/step4_controller_test.js   |   5 +-
 .../test/controllers/wizard/step7_test.js       |  26 -
 .../test/mixins/common/widget_mixin_test.js     |  51 +-
 .../wizard/assign_master_components_test.js     | 154 ++++
 .../test/models/alerts/alert_config_test.js     |   8 +-
 ambari-web/test/utils/ajax/ajax_test.js         |  32 +
 ambari-web/test/utils/config_test.js            | 242 ------
 .../test/views/common/chart/linear_time_test.js |  13 +-
 .../stack_upgrade/upgrade_wizard_view_test.js   |   5 -
 .../service/info/component_list_view_test.js    | 153 ++++
 .../test/views/main/service/info/config_test.js |  99 ++-
 .../test/views/main/service/info/menu_test.js   |  93 ++
 .../views/main/service/info/summary_test.js     | 852 +++++++++++++------
 docs/pom.xml                                    |   6 +-
 169 files changed, 4203 insertions(+), 1583 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/418745d1/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/418745d1/ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Resource.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/418745d1/ambari-web/app/messages.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/418745d1/ambari-web/app/views/main/admin/stack_upgrade/versions_view.js
----------------------------------------------------------------------


[10/50] [abbrv] ambari git commit: AMBARI-14867. Use entities for parent/child repository versions (ncole)

Posted by nc...@apache.org.
AMBARI-14867. Use entities for parent/child repository versions (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c84da249
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c84da249
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c84da249

Branch: refs/heads/trunk
Commit: c84da24939b691951244840e07526a0ed9dee30a
Parents: 73aee31
Author: Nate Cole <nc...@hortonworks.com>
Authored: Mon Feb 1 12:46:26 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Mon Feb 1 16:33:19 2016 -0500

----------------------------------------------------------------------
 .../server/configuration/Configuration.java     |  83 +++++++---
 .../RepositoryVersionResourceProvider.java      |  14 ++
 .../VersionDefinitionResourceProvider.java      | 149 ++++++++++++++---
 .../orm/entities/RepositoryVersionEntity.java   |  30 ++++
 .../main/resources/Ambari-DDL-Derby-CREATE.sql  |   1 +
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   1 +
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |   1 +
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |   1 +
 .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql     |   1 +
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |   1 +
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   |   1 +
 .../RepositoryVersionResourceProviderTest.java  |  60 -------
 .../VersionDefinitionResourceProviderTest.java  | 160 +++++++++++++++++++
 .../version_definition_resource_provider.xml    |  13 +-
 14 files changed, 398 insertions(+), 118 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c84da249/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index d1453a0..55eea7a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -17,11 +17,20 @@
  */
 package org.apache.ambari.server.configuration;
 
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import com.google.gson.JsonParser;
-import com.google.inject.Inject;
-import com.google.inject.Singleton;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.cert.CertificateException;
+import java.security.interfaces.RSAPublicKey;
+import java.util.ArrayList;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
 
 import org.apache.ambari.annotations.Experimental;
 import org.apache.ambari.annotations.ExperimentalFeature;
@@ -34,32 +43,23 @@ import org.apache.ambari.server.orm.entities.StageEntity;
 import org.apache.ambari.server.security.ClientSecurityType;
 import org.apache.ambari.server.security.authorization.LdapServerProperties;
 import org.apache.ambari.server.security.authorization.jwt.JwtAuthenticationProperties;
+import org.apache.ambari.server.security.encryption.CertificateUtils;
 import org.apache.ambari.server.security.encryption.CredentialProvider;
 import org.apache.ambari.server.state.stack.OsFamily;
-import org.apache.ambari.server.security.encryption.CertificateUtils;
 import org.apache.ambari.server.utils.Parallel;
 import org.apache.ambari.server.utils.ShellCommandUtil;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.math.NumberUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Properties;
-
-import java.security.cert.CertificateException;
-import java.security.interfaces.RSAPublicKey;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
 
 
 /**
@@ -545,6 +545,26 @@ public class Configuration {
   public static final String VIEWS_HTTP_X_XSS_PROTECTION_HEADER_VALUE_KEY = "views.http.x-xss-protection";
   public static final String VIEWS_HTTP_X_XSS_PROTECTION_HEADER_VALUE_DEFAULT = "1; mode=block";
 
+  /*
+   * Version Definition URL
+   */
+  /**
+   * The connection timeout for reading version definitions.
+   */
+  private static final String VERSION_DEFINITION_CONNECT_TIMEOUT = "server.version_definition.connect.timeout.millis";
+  /**
+   * Default connect timeout for reading version definitions.
+   */
+  private static final int VERSION_DEFINITION_CONNECT_TIMEOUT_DEFAULT = 5000;
+  /**
+   * The read timeout for reading version definitions.
+   */
+  private static final String VERSION_DEFINITION_READ_TIMEOUT = "server.version_definition.read.timeout.millis";
+  /**
+   * Default read timeout for reading version definitions.
+   */
+  private static final int VERSION_DEFINITION_READ_TIMEOUT_DEFAULT = 5000;
+
   private static final Logger LOG = LoggerFactory.getLogger(
       Configuration.class);
 
@@ -1929,7 +1949,7 @@ public class Configuration {
 
   /**
    * Get property-providers' thread pool core size.
-   * 
+   *
    * @return the property-providers' thread pool core size
    */
   public int getPropertyProvidersThreadPoolCoreSize() {
@@ -1939,7 +1959,7 @@ public class Configuration {
 
   /**
    * Get property-providers' thread pool max size.
-   * 
+   *
    * @return the property-providers' thread pool max size
    */
   public int getPropertyProvidersThreadPoolMaxSize() {
@@ -2472,4 +2492,21 @@ public class Configuration {
     }
     return attempts;
   }
+
+  /**
+   * @return the connect timeout used when loading a version definition URL.
+   */
+  public int getVersionDefinitionConnectTimeout() {
+    return NumberUtils.toInt(
+        properties.getProperty(VERSION_DEFINITION_CONNECT_TIMEOUT),
+            VERSION_DEFINITION_CONNECT_TIMEOUT_DEFAULT);
+  }
+  /**
+   * @return the read timeout used when loading a version definition URL
+   */
+  public int getVersionDefinitionReadTimeout() {
+    return NumberUtils.toInt(
+        properties.getProperty(VERSION_DEFINITION_READ_TIMEOUT),
+            VERSION_DEFINITION_READ_TIMEOUT_DEFAULT);
+  }
 }
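
The two getters added above bound how long Ambari waits when pulling a version definition from a URL; both fall back to 5000 ms via the new properties. A hypothetical consumer sketch (the real consumer is the VersionDefinitionResourceProvider changed elsewhere in this commit; only the Configuration getters are taken from this diff):

import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;

import org.apache.ambari.server.configuration.Configuration;

// Hypothetical sketch; not the code from this commit.
public class VersionDefinitionFetcher {

  /** Opens a definition URL using the configurable timeouts added above. */
  public static InputStream open(Configuration configuration, String definitionUrl) throws Exception {
    URLConnection connection = new URL(definitionUrl).openConnection();
    connection.setConnectTimeout(configuration.getVersionDefinitionConnectTimeout());
    connection.setReadTimeout(configuration.getVersionDefinitionReadTimeout());
    return connection.getInputStream();
  }
}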

http://git-wip-us.apache.org/repos/asf/ambari/blob/c84da249/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
index 873733d..87731c4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
@@ -96,6 +96,9 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
   public static final String REPOSITORY_VERSION_RELEASE_COMPATIBLE_WITH        = "RepositoryVersions/release/compatible_with";
   public static final String REPOSITORY_VERSION_AVAILABLE_SERVICES             = "RepositoryVersions/services";
 
+  public static final String REPOSITORY_VERSION_PARENT_ID                      = "RepositoryVersions/parent_id";
+  public static final String REPOSITORY_VERSION_HAS_CHILDREN                   = "RepositoryVersions/has_children";
+
   @SuppressWarnings("serial")
   private static Set<String> pkPropertyIds = new HashSet<String>() {
     {
@@ -116,6 +119,8 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
       REPOSITORY_VERSION_RELEASE_COMPATIBLE_WITH,
       REPOSITORY_VERSION_RELEASE_NOTES,
       REPOSITORY_VERSION_RELEASE_VERSION,
+      REPOSITORY_VERSION_PARENT_ID,
+      REPOSITORY_VERSION_HAS_CHILDREN,
       REPOSITORY_VERSION_AVAILABLE_SERVICES);
 
   @SuppressWarnings("serial")
@@ -157,6 +162,7 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
    */
   public RepositoryVersionResourceProvider() {
     super(propertyIds, keyPropertyIds);
+
     setRequiredCreateAuthorizations(EnumSet.of(RoleAuthorization.AMBARI_MANAGE_STACK_VERSIONS));
     setRequiredDeleteAuthorizations(EnumSet.of(RoleAuthorization.AMBARI_MANAGE_STACK_VERSIONS));
     setRequiredUpdateAuthorizations(EnumSet.of(RoleAuthorization.AMBARI_MANAGE_STACK_VERSIONS, RoleAuthorization.AMBARI_EDIT_STACK_REPOS));
@@ -226,6 +232,7 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
     final Set<String> requestedIds = getRequestPropertyIds(request, predicate);
     final Set<Map<String, Object>> propertyMaps = getPropertyMaps(predicate);
 
+
     List<RepositoryVersionEntity> requestedEntities = new ArrayList<RepositoryVersionEntity>();
     for (Map<String, Object> propertyMap: propertyMaps) {
       final StackId stackId = getStackInformationFromUrl(propertyMap);
@@ -240,6 +247,7 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
           throw new SystemException("Repository version should have numerical id");
         }
         final RepositoryVersionEntity entity = repositoryVersionDAO.findByPK(id);
+
         if (entity == null) {
           throw new NoSuchResourceException("There is no repository version with id " + id);
         } else {
@@ -258,6 +266,12 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
       setResourceProperty(resource, REPOSITORY_VERSION_REPOSITORY_VERSION_PROPERTY_ID, entity.getVersion(), requestedIds);
       setResourceProperty(resource, REPOSITORY_VERSION_TYPE_PROPERTY_ID, entity.getType(), requestedIds);
 
+      setResourceProperty(resource, REPOSITORY_VERSION_PARENT_ID, entity.getParentId(), requestedIds);
+
+      List<RepositoryVersionEntity> children = entity.getChildren();
+      setResourceProperty(resource, REPOSITORY_VERSION_HAS_CHILDREN,
+          null != children && !children.isEmpty(), requestedIds);
+
       final VersionDefinitionXml xml;
 
       try {

http://git-wip-us.apache.org/repos/asf/ambari/blob/c84da249/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java
index 4b0d3cc..3533293 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java
@@ -17,7 +17,9 @@
  */
 package org.apache.ambari.server.controller.internal;
 
-import java.net.URL;
+import java.io.InputStream;
+import java.net.URI;
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.EnumSet;
 import java.util.HashMap;
@@ -29,6 +31,8 @@ import java.util.Set;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.StaticallyInject;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.configuration.ComponentSSLConfiguration;
+import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
 import org.apache.ambari.server.controller.spi.NoSuchResourceException;
 import org.apache.ambari.server.controller.spi.Predicate;
@@ -44,11 +48,13 @@ import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
 import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.security.authorization.ResourceType;
 import org.apache.ambari.server.security.authorization.RoleAuthorization;
+import org.apache.ambari.server.state.RepositoryType;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.repository.VersionDefinitionXml;
 import org.apache.ambari.server.state.stack.upgrade.RepositoryVersionHelper;
 import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
 
 import com.google.common.collect.Sets;
 import com.google.inject.Inject;
@@ -67,6 +73,7 @@ public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourc
   public static final String VERSION_DEF_STACK_NAME                  = "VersionDefinition/stack_name";
   public static final String VERSION_DEF_STACK_VERSION               = "VersionDefinition/stack_version";
 
+
   protected static final String VERSION_DEF_TYPE_PROPERTY_ID         = "VersionDefinition/type";
   protected static final String VERSION_DEF_DEFINITION_URL           = "VersionDefinition/version_url";
   protected static final String VERSION_DEF_FULL_VERSION             = "VersionDefinition/repository_version";
@@ -74,7 +81,7 @@ public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourc
   protected static final String VERSION_DEF_RELEASE_BUILD            = "VersionDefinition/release/build";
   protected static final String VERSION_DEF_RELEASE_NOTES            = "VersionDefinition/release/notes";
   protected static final String VERSION_DEF_RELEASE_COMPATIBLE_WITH  = "VersionDefinition/release/compatible_with";
-    protected static final String VERSION_DEF_AVAILABLE_SERVICES     = "VersionDefinition/services";
+  protected static final String VERSION_DEF_AVAILABLE_SERVICES       = "VersionDefinition/services";
 
   @Inject
   private static RepositoryVersionDAO s_repoVersionDAO;
@@ -88,6 +95,9 @@ public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourc
   @Inject
   private static StackDAO s_stackDAO;
 
+  @Inject
+  private static Configuration s_configuration;
+
   /**
    * Key property ids
    */
@@ -141,12 +151,13 @@ public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourc
     Set<Map<String, Object>> requestProperties = request.getProperties();
 
     if (requestProperties.size() > 1) {
-      throw new SystemException("Cannot process more than one file per request");
+      throw new IllegalArgumentException("Cannot process more than one file per request");
     }
 
     final Map<String, Object> properties = requestProperties.iterator().next();
+
     if (!properties.containsKey(VERSION_DEF_DEFINITION_URL)) {
-      throw new SystemException(String.format("%s is required", VERSION_DEF_DEFINITION_URL));
+      throw new IllegalArgumentException(String.format("%s is required", VERSION_DEF_DEFINITION_URL));
     }
 
     RepositoryVersionEntity entity = createResources(new Command<RepositoryVersionEntity>() {
@@ -155,11 +166,15 @@ public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourc
 
         String definitionUrl = (String) properties.get(VERSION_DEF_DEFINITION_URL);
 
-        RepositoryVersionEntity entity = toRepositoryVersionEntity(definitionUrl);
+        XmlHolder holder = loadXml(definitionUrl);
+
+        RepositoryVersionEntity entity = toRepositoryVersionEntity(holder);
 
         RepositoryVersionResourceProvider.validateRepositoryVersion(s_repoVersionDAO,
             s_metaInfo.get(), entity);
 
+        checkForParent(holder, entity);
+
         s_repoVersionDAO.create(entity);
 
         return entity;
@@ -220,6 +235,62 @@ public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourc
     throw new SystemException("Cannot delete Version Definitions");
   }
 
+  /**
+   * In the case of a patch, check for a matching parent repo and link the entity to it.
+   * @param holder the loaded version definition for the patch
+   * @param entity the entity to check
+   */
+  private void checkForParent(XmlHolder holder, RepositoryVersionEntity entity) throws AmbariException {
+    if (entity.getType() != RepositoryType.PATCH) {
+      return;
+    }
+
+    List<RepositoryVersionEntity> entities = s_repoVersionDAO.findByStack(entity.getStackId());
+    if (entities.isEmpty()) {
+      throw new AmbariException(String.format("Patch %s was uploaded, but there are no repositories for %s",
+          entity.getVersion(), entity.getStackId().toString()));
+    }
+
+    List<RepositoryVersionEntity> matching = new ArrayList<>();
+
+    boolean emptyCompatible = StringUtils.isBlank(holder.xml.release.compatibleWith);
+
+    for (RepositoryVersionEntity candidate : entities) {
+      String baseVersion = candidate.getVersion();
+      if (baseVersion.lastIndexOf('-') > -1) {
+        baseVersion = baseVersion.substring(0,  baseVersion.lastIndexOf('-'));
+      }
+
+      if (emptyCompatible) {
+        if (baseVersion.equals(holder.xml.release.version)) {
+          matching.add(candidate);
+        }
+      } else {
+        if (baseVersion.matches(holder.xml.release.compatibleWith)) {
+          matching.add(candidate);
+        }
+      }
+    }
+
+    if (matching.isEmpty()) {
+      String format = "No versions matched pattern %s";
+
+      throw new AmbariException(String.format(format,
+          emptyCompatible ? holder.xml.release.version : holder.xml.release.compatibleWith));
+    } else if (matching.size() > 1) {
+      Set<String> versions = new HashSet<>();
+      for (RepositoryVersionEntity match : matching) {
+        versions.add(match.getVersion());
+      }
+
+      throw new AmbariException(String.format("More than one repository matches patch %s: %s",
+          entity.getVersion(), StringUtils.join(versions, ", ")));
+    }
+
+    RepositoryVersionEntity parent = matching.get(0);
+
+    entity.setParent(parent);
+  }
+
   @Override
   protected Set<String> getPKPropertyIds() {
     return PK_PROPERTY_IDS;
@@ -230,45 +301,63 @@ public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourc
     return ResourceType.AMBARI;
   }
 
-  /**
-   * Transforms a XML version defintion to an entity
-   *
-   * @param definitionUrl the String URL for loading
-   * @return constructed entity
-   * @throws AmbariException if some properties are missing or json has incorrect structure
-   */
-  protected RepositoryVersionEntity toRepositoryVersionEntity(String definitionUrl) throws AmbariException {
-    final VersionDefinitionXml xml;
-    final String xmlString;
+  private XmlHolder loadXml(String definitionUrl) throws AmbariException {
+    XmlHolder holder = new XmlHolder();
+    holder.url = definitionUrl;
+
+    int connectTimeout = s_configuration.getVersionDefinitionConnectTimeout();
+    int readTimeout = s_configuration.getVersionDefinitionReadTimeout();
+
     try {
-      URL url = new URL(definitionUrl);
+      URI uri = new URI(definitionUrl);
+      InputStream stream = null;
 
-      xmlString = IOUtils.toString(url.openStream(), "UTF-8");
+      if (uri.getScheme().equalsIgnoreCase("file")) {
+        stream = uri.toURL().openStream();
+      } else {
+        URLStreamProvider provider = new URLStreamProvider(connectTimeout, readTimeout,
+            ComponentSSLConfiguration.instance());
 
-      xml = VersionDefinitionXml.load(xmlString);
+        stream = provider.readFrom(definitionUrl);
+      }
+
+      holder.xmlString = IOUtils.toString(stream, "UTF-8");
+      holder.xml = VersionDefinitionXml.load(holder.xmlString);
     } catch (Exception e) {
       String err = String.format("Could not load url from %s.  %s",
           definitionUrl, e.getMessage());
       throw new AmbariException(err, e);
     }
 
+    return holder;
+  }
+
+  /**
+   * Transforms an XML version definition into an entity.
+   *
+   * @param holder the holder containing the definition URL, the raw XML, and the parsed object
+   * @return constructed entity
+   * @throws AmbariException if some properties are missing or the XML has an incorrect structure
+   */
+  protected RepositoryVersionEntity toRepositoryVersionEntity(XmlHolder holder) throws AmbariException {
+
     // !!! TODO validate parsed object graph
 
     RepositoryVersionEntity entity = new RepositoryVersionEntity();
 
-    StackId stackId = new StackId(xml.release.stackId);
+    StackId stackId = new StackId(holder.xml.release.stackId);
 
     StackEntity stackEntity = s_stackDAO.find(stackId.getStackName(), stackId.getStackVersion());
 
     entity.setStack(stackEntity);
     entity.setOperatingSystems(s_repoVersionHelper.get().serializeOperatingSystems(
-        xml.repositoryInfo.getRepositories()));
-    entity.setVersion(xml.release.getFullVersion());
-    entity.setDisplayName(stackId, xml.release);
-    entity.setType(xml.release.repositoryType);
-    entity.setVersionUrl(definitionUrl);
-    entity.setVersionXml(xmlString);
-    entity.setVersionXsd(xml.xsdLocation);
+        holder.xml.repositoryInfo.getRepositories()));
+    entity.setVersion(holder.xml.release.getFullVersion());
+    entity.setDisplayName(stackId, holder.xml.release);
+    entity.setType(holder.xml.release.repositoryType);
+    entity.setVersionUrl(holder.url);
+    entity.setVersionXml(holder.xmlString);
+    entity.setVersionXsd(holder.xml.xsdLocation);
 
     return entity;
   }
@@ -327,5 +416,13 @@ public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourc
     return resource;
   }
 
+  /**
+   * Convenience class to hold the xml String representation, the url, and the parsed object.
+   */
+  private static class XmlHolder {
+    String url = null;
+    String xmlString = null;
+    VersionDefinitionXml xml = null;
+  }
 
 }
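For illustration, a minimal sketch of the parent-matching rule in checkForParent(): strip the trailing build suffix from each existing repo version, then require an exact match against the patch's release version when compatible-with is blank, or a regex match against compatible-with otherwise. The class and method names are illustrative, a plain null/blank check stands in for StringUtils.isBlank, and the sample values mirror the test resources later in this commit.

public class ParentMatchSketch {

  // mirror of the base-version trimming: drop everything from the last '-' (the build number)
  static String baseVersion(String version) {
    int dash = version.lastIndexOf('-');
    return dash > -1 ? version.substring(0, dash) : version;
  }

  // blank compatible-with: require an exact match on the release version;
  // otherwise: treat compatible-with as a regex over the base version
  static boolean isParentCandidate(String candidateVersion, String releaseVersion, String compatibleWith) {
    String base = baseVersion(candidateVersion);
    if (compatibleWith == null || compatibleWith.trim().isEmpty()) {
      return base.equals(releaseVersion);
    }
    return base.matches(compatibleWith);
  }

  public static void main(String[] args) {
    // values mirroring the test resources in this commit: seeded repo 2.3.4.4-1234,
    // patch release 2.2.0.8 with <compatible-with>2.3.4.[1-9]</compatible-with>
    System.out.println(isParentCandidate("2.3.4.4-1234", "2.2.0.8", "2.3.4.[1-9]")); // true
    System.out.println(isParentCandidate("2.3.4.4-1234", "2.3.4.4", ""));            // true
    System.out.println(isParentCandidate("2.1.0.0-0001", "2.2.0.8", "2.3.4.[1-9]")); // false
  }
}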

http://git-wip-us.apache.org/repos/asf/ambari/blob/c84da249/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
index fa2f905..4af4216 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
@@ -33,6 +33,7 @@ import javax.persistence.GenerationType;
 import javax.persistence.Id;
 import javax.persistence.JoinColumn;
 import javax.persistence.Lob;
+import javax.persistence.ManyToOne;
 import javax.persistence.NamedQueries;
 import javax.persistence.NamedQuery;
 import javax.persistence.OneToMany;
@@ -124,6 +125,13 @@ public class RepositoryVersionEntity {
   @Column(name="version_xsd", insertable = true, updatable = true)
   private String versionXsd;
 
+  @ManyToOne
+  @JoinColumn(name = "parent_id")
+  private RepositoryVersionEntity parent;
+
+  @OneToMany(mappedBy = "parent")
+  private List<RepositoryVersionEntity> children;
+
   // ----- RepositoryVersionEntity -------------------------------------------------------
 
   public RepositoryVersionEntity() {
@@ -389,4 +397,26 @@ public class RepositoryVersionEntity {
     return false;
   }
 
+  /**
+   * Sets the parent repository version and adds this entity to its children.
+   * @param entity the parent entity
+   */
+  public void setParent(RepositoryVersionEntity entity) {
+    parent = entity;
+    parent.children.add(this);
+  }
+
+  /**
+   * @return the repositories that are denoted children
+   */
+  public List<RepositoryVersionEntity> getChildren() {
+    return children;
+  }
+
+  /**
+   * @return the id of the parent entity, or {@code null} if this entity has no parent
+   */
+  public Long getParentId() {
+    return null == parent ? null : parent.getId();
+  }
+
 }
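For illustration, a minimal POJO sketch (no JPA annotations) of the bookkeeping setParent() performs: the child keeps the owning @ManyToOne reference and also adds itself to the parent's children list so the in-memory graph matches the mappedBy side. The sketch initializes the list itself purely so it runs standalone; the real entity leaves children to be populated by JPA.

import java.util.ArrayList;
import java.util.List;

public class ParentChildSketch {
  private final long id;
  private ParentChildSketch parent;
  // initialized here only so the sketch runs standalone; the entity leaves this to JPA
  private final List<ParentChildSketch> children = new ArrayList<>();

  ParentChildSketch(long id) {
    this.id = id;
  }

  // keep both sides of the bidirectional relationship in sync, as setParent() does
  void setParent(ParentChildSketch entity) {
    parent = entity;
    parent.children.add(this);
  }

  Long getParentId() {
    return parent == null ? null : parent.id;
  }

  public static void main(String[] args) {
    ParentChildSketch standard = new ParentChildSketch(1);
    ParentChildSketch patch = new ParentChildSketch(2);
    patch.setParent(standard);

    System.out.println(patch.getParentId());      // 1
    System.out.println(standard.getParentId());   // null
    System.out.println(standard.children.size()); // 1
  }
}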

http://git-wip-us.apache.org/repos/asf/ambari/blob/c84da249/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
index a43211c..d873122 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
@@ -554,6 +554,7 @@ CREATE TABLE repo_version (
   version_url VARCHAR(1024),
   version_xml CLOB,
   version_xsd VARCHAR(512),
+  parent_id BIGINT,
   PRIMARY KEY(repo_version_id)
 );
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c84da249/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 1941331..705436c 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -562,6 +562,7 @@ CREATE TABLE repo_version (
   version_url VARCHAR(1024),
   version_xml MEDIUMTEXT,
   version_xsd VARCHAR(512),
+  parent_id BIGINT,
   PRIMARY KEY(repo_version_id)
 );
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c84da249/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 75d2a4b..10785b9 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -551,6 +551,7 @@ CREATE TABLE repo_version (
   version_url VARCHAR(1024),
   version_xml CLOB,
   version_xsd VARCHAR(512),
+  parent_id NUMBER(19),
   PRIMARY KEY(repo_version_id)
 );
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c84da249/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index 1a6c9c6..54f5404 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -555,6 +555,7 @@ CREATE TABLE repo_version (
   version_url VARCHAR(1024),
  version_xml TEXT,
   version_xsd VARCHAR(512),
+  parent_id BIGINT,
   PRIMARY KEY(repo_version_id)
 );
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c84da249/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
index efaa7a9..f0302f5 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
@@ -624,6 +624,7 @@ CREATE TABLE ambari.repo_version (
   version_url VARCHAR(1024),
   version_xml TEXT,
   version_xsd VARCHAR(512),
+  parent_id BIGINT,
   PRIMARY KEY(repo_version_id)
 );
 GRANT ALL PRIVILEGES ON TABLE ambari.repo_version TO :username;

http://git-wip-us.apache.org/repos/asf/ambari/blob/c84da249/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
index 267a1f0..fba786f 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
@@ -552,6 +552,7 @@ CREATE TABLE repo_version (
   version_url VARCHAR(1024),
   version_xml TEXT,
   version_xsd VARCHAR(512),
+  parent_id NUMERIC(19),
   PRIMARY KEY(repo_version_id)
 );
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c84da249/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
index 828a3a3..a5c60c7 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
@@ -656,6 +656,7 @@ CREATE TABLE repo_version (
   version_url VARCHAR(1024),
   version_xml VARCHAR(MAX),
   version_xsd VARCHAR(512),
+  parent_id BIGINT,
   PRIMARY KEY CLUSTERED (repo_version_id)
   );
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c84da249/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProviderTest.java
index f3cf954..e031fc8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProviderTest.java
@@ -18,7 +18,6 @@
 
 package org.apache.ambari.server.controller.internal;
 
-import java.io.File;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -33,8 +32,6 @@ import org.apache.ambari.server.controller.ResourceProviderFactory;
 import org.apache.ambari.server.controller.predicate.AndPredicate;
 import org.apache.ambari.server.controller.spi.Predicate;
 import org.apache.ambari.server.controller.spi.Request;
-import org.apache.ambari.server.controller.spi.RequestStatus;
-import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.ResourceProvider;
 import org.apache.ambari.server.controller.utilities.PredicateBuilder;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
@@ -277,64 +274,7 @@ public class RepositoryVersionResourceProviderTest {
     Assert.assertEquals(1, provider.getResources(getRequest, new AndPredicate(predicateStackName, predicateStackVersion)).size());
   }
 
-  @Test
-  public void testCreateResourcesWithUrl() throws Exception {
-    Authentication authentication = TestAuthenticationFactory.createAdministrator();
-    SecurityContextHolder.getContext().setAuthentication(authentication);
-
-    File file = new File("src/test/resources/version_definition_resource_provider.xml");
-
-    final ResourceProvider versionProvider = new VersionDefinitionResourceProvider();
-    final ResourceProvider provider = injector.getInstance(ResourceProviderFactory.class).getRepositoryVersionResourceProvider();
 
-    final Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
-    final Map<String, Object> properties = new LinkedHashMap<String, Object>();
-    properties.put(VersionDefinitionResourceProvider.VERSION_DEF_DEFINITION_URL, file.toURI().toURL().toString());
-    propertySet.add(properties);
-
-    final Predicate predicateStackName = new PredicateBuilder().property(RepositoryVersionResourceProvider.REPOSITORY_VERSION_STACK_NAME_PROPERTY_ID).equals("HDP").toPredicate();
-    final Predicate predicateStackVersion = new PredicateBuilder().property(RepositoryVersionResourceProvider.REPOSITORY_VERSION_STACK_VERSION_PROPERTY_ID).equals("1.1").toPredicate();
-    Request getRequest = PropertyHelper.getReadRequest(RepositoryVersionResourceProvider.REPOSITORY_VERSION_DISPLAY_NAME_PROPERTY_ID);
-    Assert.assertEquals(0, provider.getResources(getRequest, new AndPredicate(predicateStackName, predicateStackVersion)).size());
-
-    final Request createRequest = PropertyHelper.getCreateRequest(propertySet, null);
-    RequestStatus status = versionProvider.createResources(createRequest);
-    Assert.assertEquals(1, status.getAssociatedResources().size());
-
-    getRequest = PropertyHelper.getReadRequest("VersionDefinition");
-    Set<Resource> results = versionProvider.getResources(getRequest, null);
-    Assert.assertEquals(1, results.size());
-
-    results = provider.getResources(getRequest, new AndPredicate(predicateStackName, predicateStackVersion));
-    Assert.assertEquals(1, results.size());
-
-    getRequest = PropertyHelper.getReadRequest(
-        RepositoryVersionResourceProvider.REPOSITORY_VERSION_DISPLAY_NAME_PROPERTY_ID,
-        RepositoryVersionResourceProvider.REPOSITORY_VERSION_ID_PROPERTY_ID,
-        RepositoryVersionResourceProvider.REPOSITORY_VERSION_REPOSITORY_VERSION_PROPERTY_ID,
-        RepositoryVersionResourceProvider.REPOSITORY_VERSION_STACK_NAME_PROPERTY_ID,
-        RepositoryVersionResourceProvider.REPOSITORY_VERSION_STACK_VERSION_PROPERTY_ID,
-        "RepositoryVersions/release",
-        "RepositoryVersions/services",
-        RepositoryVersionResourceProvider.SUBRESOURCE_OPERATING_SYSTEMS_PROPERTY_ID,
-        RepositoryVersionResourceProvider.SUBRESOURCE_REPOSITORIES_PROPERTY_ID);
-
-    results = provider.getResources(getRequest, new AndPredicate(predicateStackName, predicateStackVersion));
-    Assert.assertEquals(1, results.size());
-
-    Resource r = results.iterator().next();
-    Map<String, Map<String, Object>> map = r.getPropertiesMap();
-    Assert.assertTrue(map.containsKey("RepositoryVersions"));
-
-    Map<String, Object> vals = map.get("RepositoryVersions");
-    Assert.assertEquals("1.1.7.1-1234", vals.get("repository_version"));
-
-    Assert.assertTrue(map.containsKey("RepositoryVersions/release"));
-    vals = map.get("RepositoryVersions/release");
-    Assert.assertEquals("1234", vals.get("build"));
-    Assert.assertEquals("2.3.4.[1-9]", vals.get("compatible_with"));
-    Assert.assertEquals("http://docs.hortonworks.com/HDPDocuments/HDP2/HDP-2.3.4/", vals.get("notes"));
-  }
 
 
   @Test

http://git-wip-us.apache.org/repos/asf/ambari/blob/c84da249/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProviderTest.java
new file mode 100644
index 0000000..dbac1b4
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProviderTest.java
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.controller.internal;
+
+import java.io.File;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.controller.ResourceProviderFactory;
+import org.apache.ambari.server.controller.predicate.AndPredicate;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.RequestStatus;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceProvider;
+import org.apache.ambari.server.controller.utilities.PredicateBuilder;
+import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.apache.ambari.server.orm.GuiceJpaInitializer;
+import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
+import org.apache.ambari.server.orm.dao.StackDAO;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.orm.entities.StackEntity;
+import org.apache.ambari.server.security.TestAuthenticationFactory;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.springframework.security.core.Authentication;
+import org.springframework.security.core.context.SecurityContextHolder;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.persist.PersistService;
+
+/**
+ * Tests the VersionDefinitionResourceProvider class
+ */
+public class VersionDefinitionResourceProviderTest {
+  private Injector injector;
+
+  @Before
+  public void before() throws Exception {
+    injector = Guice.createInjector(new InMemoryDefaultTestModule());
+    injector.getInstance(GuiceJpaInitializer.class);
+
+    AmbariMetaInfo ami = injector.getInstance(AmbariMetaInfo.class);
+    ami.init();
+
+    StackDAO stackDao = injector.getInstance(StackDAO.class);
+    StackEntity stack = stackDao.find("HDP", "2.2.0");
+
+    RepositoryVersionEntity entity = new RepositoryVersionEntity();
+    entity.setStack(stack);
+    entity.setDisplayName("2.2.0.0");
+    entity.setVersion("2.3.4.4-1234");
+
+    RepositoryVersionDAO dao = injector.getInstance(RepositoryVersionDAO.class);
+    dao.create(entity);
+
+  }
+
+  @After
+  public void after() throws Exception {
+    injector.getInstance(PersistService.class).stop();
+  }
+
+  @Test
+  public void testWithParent() throws Exception {
+    Authentication authentication = TestAuthenticationFactory.createAdministrator();
+    SecurityContextHolder.getContext().setAuthentication(authentication);
+
+    File file = new File("src/test/resources/version_definition_resource_provider.xml");
+
+    final ResourceProvider versionProvider = new VersionDefinitionResourceProvider();
+    final ResourceProvider provider = injector.getInstance(ResourceProviderFactory.class)
+        .getRepositoryVersionResourceProvider();
+
+    final Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
+    final Map<String, Object> properties = new LinkedHashMap<String, Object>();
+    properties.put(VersionDefinitionResourceProvider.VERSION_DEF_DEFINITION_URL,
+        file.toURI().toURL().toString());
+    propertySet.add(properties);
+
+
+    final Request createRequest = PropertyHelper.getCreateRequest(propertySet, null);
+    RequestStatus status = versionProvider.createResources(createRequest);
+    Assert.assertEquals(1, status.getAssociatedResources().size());
+
+    Request getRequest = PropertyHelper.getReadRequest("VersionDefinition");
+    Set<Resource> results = versionProvider.getResources(getRequest, null);
+    Assert.assertEquals(1, results.size());
+
+    final Predicate predicateStackName = new PredicateBuilder().property(RepositoryVersionResourceProvider.REPOSITORY_VERSION_STACK_NAME_PROPERTY_ID).equals("HDP").toPredicate();
+    final Predicate predicateStackVersion = new PredicateBuilder().property(RepositoryVersionResourceProvider.REPOSITORY_VERSION_STACK_VERSION_PROPERTY_ID).equals("2.2.0").toPredicate();
+
+    results = provider.getResources(getRequest,
+        new AndPredicate(predicateStackName, predicateStackVersion));
+    Assert.assertEquals(1, results.size());
+
+    getRequest = PropertyHelper.getReadRequest(
+        RepositoryVersionResourceProvider.REPOSITORY_VERSION_DISPLAY_NAME_PROPERTY_ID,
+        RepositoryVersionResourceProvider.REPOSITORY_VERSION_ID_PROPERTY_ID,
+        RepositoryVersionResourceProvider.REPOSITORY_VERSION_REPOSITORY_VERSION_PROPERTY_ID,
+        RepositoryVersionResourceProvider.REPOSITORY_VERSION_STACK_NAME_PROPERTY_ID,
+        RepositoryVersionResourceProvider.REPOSITORY_VERSION_STACK_VERSION_PROPERTY_ID,
+        RepositoryVersionResourceProvider.SUBRESOURCE_OPERATING_SYSTEMS_PROPERTY_ID,
+        RepositoryVersionResourceProvider.SUBRESOURCE_REPOSITORIES_PROPERTY_ID,
+        "RepositoryVersions/release", "RepositoryVersions/services",
+        "RepositoryVersions/has_children", "RepositoryVersions/parent_id");
+
+    results = provider.getResources(getRequest,
+        new AndPredicate(predicateStackName, predicateStackVersion));
+    Assert.assertEquals(2, results.size());
+
+    Resource r = null;
+    for (Resource result : results) {
+      if (result.getPropertyValue("RepositoryVersions/repository_version").equals("2.2.0.8-5678")) {
+        r = result;
+        break;
+      }
+    }
+
+    Assert.assertNotNull(r);
+    Map<String, Map<String, Object>> map = r.getPropertiesMap();
+    Assert.assertTrue(map.containsKey("RepositoryVersions"));
+
+    Map<String, Object> vals = map.get("RepositoryVersions");
+
+    Assert.assertEquals("2.2.0.8-5678", vals.get("repository_version"));
+    Assert.assertNotNull(vals.get("parent_id"));
+    Assert.assertEquals(Boolean.FALSE, vals.get("has_children"));
+
+
+    Assert.assertTrue(map.containsKey("RepositoryVersions/release"));
+    vals = map.get("RepositoryVersions/release");
+    Assert.assertEquals("5678", vals.get("build"));
+    Assert.assertEquals("2.3.4.[1-9]", vals.get("compatible_with"));
+    Assert.assertEquals("http://docs.hortonworks.com/HDPDocuments/HDP2/HDP-2.3.4/",
+        vals.get("notes"));
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c84da249/ambari-server/src/test/resources/version_definition_resource_provider.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/version_definition_resource_provider.xml b/ambari-server/src/test/resources/version_definition_resource_provider.xml
index 735f91d..55cd1c1 100644
--- a/ambari-server/src/test/resources/version_definition_resource_provider.xml
+++ b/ambari-server/src/test/resources/version_definition_resource_provider.xml
@@ -21,9 +21,9 @@
   
   <release>
     <type>PATCH</type>
-    <stack-id>HDP-1.1</stack-id>
-    <version>1.1.7.1</version>
-    <build>1234</build>
+    <stack-id>HDP-2.2.0</stack-id>
+    <version>2.2.0.8</version>
+    <build>5678</build>
     <compatible-with>2.3.4.[1-9]</compatible-with>
     <release-notes>http://docs.hortonworks.com/HDPDocuments/HDP2/HDP-2.3.4/</release-notes>
   </release>
@@ -54,17 +54,12 @@
         <reponame>HDP-UTILS</reponame>
       </repo>
     </os>
-    <os family="redhat7">
+    <os family="suse11">
       <repo>
         <baseurl>http://public-repo-1.hortonworks.com/HDP/centos7/2.x/updates/2.3.0.0</baseurl>
         <repoid>HDP-2.3</repoid>
         <reponame>HDP</reponame>
       </repo>
-      <repo>
-        <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos7</baseurl>
-        <repoid>HDP-UTILS-1.1.0.20</repoid>
-        <reponame>HDP-UTILS</reponame>
-      </repo>
     </os>
 
   </repository-info>


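For illustration, a small sketch of where the test's expected repository_version string comes from: the <version> and <build> values in the XML above are presumably joined with a dash by the release object's getFullVersion(), which is what the assertion on "2.2.0.8-5678" in the new test checks. The helper name below is illustrative only.

public class FullVersionSketch {
  // presumed behaviour of release.getFullVersion(): join version and build with '-'
  static String fullVersion(String version, String build) {
    return (build == null || build.isEmpty()) ? version : version + "-" + build;
  }

  public static void main(String[] args) {
    System.out.println(fullVersion("2.2.0.8", "5678")); // 2.2.0.8-5678, as asserted in the test
  }
}
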
[31/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4b5a250e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4b5a250e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4b5a250e

Branch: refs/heads/trunk
Commit: 4b5a250e7778ffa89e318d293dafe727d8a20644
Parents: 418745d d48fb9b
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Fri Feb 19 15:24:50 2016 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Fri Feb 19 15:24:50 2016 +0200

----------------------------------------------------------------------
 .../src/main/repo/install_ambari_tarball.py     |   2 +-
 ambari-server/conf/unix/ambari-env.sh           |   6 +-
 ambari-server/conf/unix/ambari.properties       |  30 +--
 ambari-server/conf/unix/install-helper.sh       |   7 +-
 ambari-server/conf/unix/log4j.properties        |   2 +-
 ambari-server/conf/windows/log4j.properties     |   2 +-
 ambari-server/pom.xml                           |  14 ++
 ambari-server/sbin/ambari-server                |  57 +++--
 ambari-server/src/main/conf/log4j.properties    |   2 +-
 .../controller/AmbariActionExecutionHelper.java |  13 +-
 .../ambari/server/controller/AmbariServer.java  |   3 +-
 .../internal/AbstractProviderModule.java        |  31 +--
 .../internal/UpgradeResourceProvider.java       |   1 +
 .../controller/utilities/DatabaseChecker.java   |   2 +-
 .../ambari/server/stack/MasterHostResolver.java |  53 ++--
 .../src/main/package/deb/control/preinst        |   2 +-
 ambari-server/src/main/python/ambari-server.py  |  17 +-
 .../main/python/ambari_server/BackupRestore.py  |  11 +-
 .../src/main/python/ambari_server/ambariPath.py |  41 ++++
 .../ambari_server/dbConfiguration_linux.py      |  29 +--
 .../python/ambari_server/serverConfiguration.py | 103 ++++----
 .../main/python/ambari_server/serverSetup.py    |   6 +-
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   2 +-
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |   2 +-
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |   2 +-
 .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql     |   2 +-
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |   2 +-
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   |   2 +-
 .../common-services/HAWQ/2.0.0/metainfo.xml     |  16 ++
 .../HAWQ/2.0.0/package/scripts/common.py        |   9 +-
 .../2.0.0/package/scripts/hawq_constants.py     |   2 +-
 .../HAWQ/2.0.0/package/scripts/hawqmaster.py    |  19 ++
 .../HAWQ/2.0.0/package/scripts/master_helper.py |  16 +-
 .../HAWQ/2.0.0/package/scripts/params.py        |   6 +
 .../HAWQ/2.0.0/package/templates/hawq-hosts.j2  |  22 --
 .../HAWQ/2.0.0/package/templates/slaves.j2      |  22 --
 .../1.4.4.2.0/configuration/sqoop-site.xml      |  43 ++++
 .../SQOOP/1.4.4.2.0/metainfo.xml                |   6 +
 .../1.4.4.2.0/package/scripts/params_linux.py   |  20 +-
 .../SQOOP/1.4.4.2.0/package/scripts/sqoop.py    |  26 ++
 .../main/resources/scripts/Ambaripreupload.py   |   7 +
 .../stacks/HDP/2.2/role_command_order.json      |   1 -
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml |  13 +-
 .../stacks/HDP/2.3/upgrades/upgrade-2.4.xml     |   2 +
 .../main/resources/stacks/HDP/2.6/metainfo.xml  |  25 ++
 .../resources/stacks/HDP/2.6/repos/repoinfo.xml |  92 +++++++
 .../HDP/2.6/services/ACCUMULO/metainfo.xml      |  26 ++
 .../stacks/HDP/2.6/services/ATLAS/metainfo.xml  |  26 ++
 .../stacks/HDP/2.6/services/FALCON/metainfo.xml |  26 ++
 .../stacks/HDP/2.6/services/FLUME/metainfo.xml  |  26 ++
 .../stacks/HDP/2.6/services/HBASE/metainfo.xml  |  26 ++
 .../stacks/HDP/2.6/services/HDFS/metainfo.xml   |  26 ++
 .../stacks/HDP/2.6/services/HIVE/metainfo.xml   |  26 ++
 .../stacks/HDP/2.6/services/KAFKA/metainfo.xml  |  26 ++
 .../HDP/2.6/services/KERBEROS/metainfo.xml      |  25 ++
 .../stacks/HDP/2.6/services/KNOX/metainfo.xml   |  26 ++
 .../stacks/HDP/2.6/services/MAHOUT/metainfo.xml |  26 ++
 .../stacks/HDP/2.6/services/OOZIE/metainfo.xml  |  26 ++
 .../stacks/HDP/2.6/services/PIG/metainfo.xml    |  26 ++
 .../stacks/HDP/2.6/services/RANGER/metainfo.xml |  29 +++
 .../HDP/2.6/services/RANGER_KMS/metainfo.xml    |  29 +++
 .../stacks/HDP/2.6/services/SLIDER/metainfo.xml |  26 ++
 .../stacks/HDP/2.6/services/SPARK/metainfo.xml  |  29 +++
 .../stacks/HDP/2.6/services/SQOOP/metainfo.xml  |  26 ++
 .../stacks/HDP/2.6/services/STORM/metainfo.xml  |  27 +++
 .../stacks/HDP/2.6/services/TEZ/metainfo.xml    |  26 ++
 .../stacks/HDP/2.6/services/YARN/metainfo.xml   |  27 +++
 .../HDP/2.6/services/ZOOKEEPER/metainfo.xml     |  26 ++
 .../stacks/HDP/2.6/services/stack_advisor.py    |  22 ++
 .../AmbariManagementControllerTest.java         |   2 +-
 .../ambari/server/state/UpgradeHelperTest.java  | 140 +++++++++--
 .../ambari/server/utils/TestVersionUtils.java   |   2 +
 .../src/test/python/TestAmbariServer.py         |   2 +-
 .../src/test/python/TestBackupRestore.py        |   3 +
 ambari-server/src/test/python/TestOSCheck.py    |   1 +
 .../src/test/python/TestResourceFilesKeeper.py  |   3 +
 .../src/test/python/TestServerClassPath.py      |   3 +
 .../src/test/python/TestServerUpgrade.py        |   2 +
 .../src/test/python/TestServerUtils.py          |   3 +
 .../2.0.6/HBASE/test_phoenix_queryserver.py     |   9 +-
 .../python/stacks/2.0.6/SQOOP/test_sqoop.py     |  12 +
 .../python/stacks/2.0.6/configs/default.json    |   5 +
 .../python/stacks/2.0.6/configs/secured.json    |   5 +
 .../test/python/stacks/2.3/configs/default.json |   5 +
 .../alerts/definition_configs_controller.js     |   1 +
 .../service/widgets/create/wizard_controller.js |  16 +-
 .../app/controllers/wizard/step8_controller.js  | 240 ++++++-------------
 ambari-web/app/messages.js                      |   8 +-
 .../mixins/common/configs/configs_comparator.js |   4 +-
 .../app/mixins/common/configs/configs_saver.js  | 217 +++++++----------
 ambari-web/app/models/host_component.js         |  44 +++-
 .../alerts/configs/alert_config_parameter.hbs   |   2 +-
 ambari-web/app/utils/helper.js                  |  20 +-
 .../stack_upgrade/upgrade_version_box_view.js   |   8 +-
 .../main/alerts/definition_configs_view.js      |  10 +-
 ambari-web/app/views/main/service/item.js       |   2 +-
 .../views/main/service/reassign/step5_view.js   |  16 +-
 .../test/controllers/wizard/step4_test.js       |  36 ++-
 .../test/controllers/wizard/step8_test.js       | 176 +-------------
 .../mixins/common/configs/configs_saver_test.js |  98 ++++----
 ambari-web/test/utils/config_test.js            |   4 -
 .../upgrade_version_box_view_test.js            |  17 +-
 .../views/main/service/info/summary_test.js     |   4 +-
 103 files changed, 1691 insertions(+), 826 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4b5a250e/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b5a250e/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b5a250e/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index f8e4ee1,e43a7fc..026efea
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@@ -1010,10 -990,8 +1010,10 @@@ INSERT INTO ambari_sequences(sequence_n
  INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('topology_host_group_id_seq', 0);
  INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('setting_id_seq', 0);
  INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('hostcomponentstate_id_seq', 0);
 +INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('servicecomponentdesiredstate_id_seq', 0);
 +INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('servicecomponent_history_id_seq', 0);
  
- INSERT INTO metainfo("metainfo_key", "metainfo_value") values ('version', '${ambariVersion}');
+ INSERT INTO metainfo("metainfo_key", "metainfo_value") values ('version', '${ambariSchemaVersion}');
  
  insert into adminresourcetype (resource_type_id, resource_type_name)
    select 1, 'AMBARI' from dual

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b5a250e/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b5a250e/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b5a250e/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b5a250e/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b5a250e/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b5a250e/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b5a250e/ambari-web/app/messages.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4b5a250e/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
----------------------------------------------------------------------


[27/50] [abbrv] ambari git commit: AMBARI-14930. Admin Versions: display all existing stack versions in the new format. (xiwang)

Posted by nc...@apache.org.
AMBARI-14930. Admin Versions: display all existing stack versions in the new format. (xiwang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1e89d1d5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1e89d1d5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1e89d1d5

Branch: refs/heads/trunk
Commit: 1e89d1d578d6ae38fe706e7e07eb0cde52429d92
Parents: 03bc226
Author: Xi Wang <xi...@apache.org>
Authored: Thu Feb 4 12:00:30 2016 -0800
Committer: Xi Wang <xi...@apache.org>
Committed: Thu Feb 11 15:37:49 2016 -0800

----------------------------------------------------------------------
 .../app/mappers/repository_version_mapper.js    |  91 +++++++++++++++
 ambari-web/app/messages.js                      |   5 +
 ambari-web/app/models.js                        |   1 +
 .../models/stack_version/repository_version.js  |   6 +-
 .../app/models/stack_version/service_simple.js  |  28 +++++
 ambari-web/app/styles/stack_versions.less       | 112 ++++++++++++++++++-
 .../admin/stack_upgrade/upgrade_version_box.hbs |   4 +
 .../stack_upgrade/upgrade_version_column.hbs    |  86 ++++++++++++++
 .../main/admin/stack_upgrade/versions.hbs       |  61 ++++++----
 ambari-web/app/utils/host_progress_popup.js     |   4 +-
 ambari-web/app/views.js                         |   1 +
 .../stack_upgrade/upgrade_version_box_view.js   |  17 ++-
 .../upgrade_version_column_view.js              | 106 ++++++++++++++++++
 .../main/admin/stack_upgrade/versions_view.js   |   5 +
 14 files changed, 495 insertions(+), 32 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1e89d1d5/ambari-web/app/mappers/repository_version_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/repository_version_mapper.js b/ambari-web/app/mappers/repository_version_mapper.js
index 0bb094f..c937678 100644
--- a/ambari-web/app/mappers/repository_version_mapper.js
+++ b/ambari-web/app/mappers/repository_version_mapper.js
@@ -21,6 +21,7 @@ App.repoVersionMapper = App.QuickDataMapper.create({
   modelRepoVersions: App.RepositoryVersion,
   modelOperatingSystems: App.OS,
   modelRepositories: App.Repository,
+  modelServices: App.ServiceSimple,
 
   modelRepoVersion: function (isCurrentStackOnly) {
     var repoVersionsKey = isCurrentStackOnly ? 'RepositoryVersions' : 'CompatibleRepositoryVersions';
@@ -28,10 +29,16 @@ App.repoVersionMapper = App.QuickDataMapper.create({
       id: repoVersionsKey + '.id',
       stack_version_id: repoVersionsKey + '.stackVersionId',
       display_name: repoVersionsKey + '.display_name',
+      type: repoVersionsKey + '.type',
       repository_version: repoVersionsKey + '.repository_version',
       upgrade_pack: repoVersionsKey + '.upgrade_pack',
       stack_version_type: repoVersionsKey + '.stack_name',
       stack_version_number: repoVersionsKey + '.stack_version',
+      services_key: 'services',
+      services_type: 'array',
+      services: {
+        item: 'id'
+      },
       operating_systems_key: 'operating_systems',
       operating_systems_type: 'array',
       operating_systems: {
@@ -53,6 +60,13 @@ App.repoVersionMapper = App.QuickDataMapper.create({
     }
   },
 
+  modelService: {
+    id: 'id',
+    name: 'name',
+    display_name: 'display_name',
+    latest_version: 'latest_version'
+  },
+
   modelRepository: {
     id: 'id',
     operating_system_id: 'Repositories.operating_system_id',
@@ -71,18 +85,81 @@ App.repoVersionMapper = App.QuickDataMapper.create({
     var modelRepoVersions = this.get('modelRepoVersions');
     var modelOperatingSystems = this.get('modelOperatingSystems');
     var modelRepositories = this.get('modelRepositories');
+    var modelServices = this.get('modelServices');
 
     var resultRepoVersion = [];
     var resultOS = [];
     var resultRepo = [];
+    var resultService = [];
 
     var repoVersionsKey = isCurrentStackOnly ? 'RepositoryVersions' : 'CompatibleRepositoryVersions';
 
     if (json && json.items) {
       json.items.forEach(function (item) {
+        ////////////// hardcoded data for testing //////////////
+        item[repoVersionsKey].type = "PATCH";
+        item[repoVersionsKey].release = {
+          "stack_id": "HDP-2.2",
+          "version": "2.2.4.1",
+          "build": "885",
+          "compatible_with": "", /* regex */
+          "release_notes": "http://someurl"
+        };
+        item[repoVersionsKey].services = [
+          {
+            "name": "HDFS",
+            "display_name": "HDFS",
+            "versions": [
+              {
+                "version": "2.1.1",
+                "version_id": "10",
+                "components": [ "NAMENODE"]
+              }
+            ]
+          },
+          {
+            "name": "YARN",
+            "display_name": "YARN",
+            "versions": [
+              {
+                "version": "2.7.1.2.3"
+              }
+            ]
+          },
+          {
+            "name": "ZOOKEEPER",
+            "display_name": "ZooKeeper",
+            "versions": [
+              {
+                "version": "3.4.6.2.3"
+              }
+            ]
+          },
+          {
+            "name": "wrong",
+            "display_name": "MapReduce2",
+            "versions": [
+              {
+                "version": "2.7.1.2.3"
+              }
+            ]
+          },
+          {
+            "name": "AMBARI_METRICS",
+            "display_name": "Ambari Metrics",
+            "versions": [
+              {
+                "version": "0.1.0"
+              }
+            ]
+          }
+        ];
+        //////////////////////////////
+
         if (loadAll || (item[repoVersionsKey] && !App.StackVersion.find().someProperty('repositoryVersion.id', item[repoVersionsKey].id))) {
           var repo = item;
           var osArray = [];
+          var serviceArray = [];
           if (item.operating_systems) {
             item.operating_systems.forEach(function (os) {
               os.id = item[repoVersionsKey].repository_version + os.OperatingSystems.os_type;
@@ -101,7 +178,20 @@ App.repoVersionMapper = App.QuickDataMapper.create({
               resultOS.push(this.parseIt(os, this.get('modelOS')));
             }, this);
           }
+          if (item[repoVersionsKey].services) {
+            item[repoVersionsKey].services.forEach(function (service) {
+              var serviceObj = {
+                id: service.name,
+                name: service.name,
+                display_name: service.display_name,
+                latest_version: service.versions[0] ? service.versions[0].version : ''
+              };
+              serviceArray.pushObject(serviceObj);
+              resultService.push(this.parseIt(serviceObj, this.get('modelService')));
+            }, this);
+          }
           repo.operating_systems = osArray;
+          repo.services = serviceArray;
           resultRepoVersion.push(this.parseIt(repo, this.modelRepoVersion(isCurrentStackOnly)));
         }
       }, this);
@@ -109,6 +199,7 @@ App.repoVersionMapper = App.QuickDataMapper.create({
     App.store.commit();
     App.store.loadMany(modelRepositories, resultRepo);
     App.store.loadMany(modelOperatingSystems, resultOS);
+    App.store.loadMany(modelServices, resultService);
     App.store.loadMany(modelRepoVersions, resultRepoVersion);
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/1e89d1d5/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 59877a5..8860915 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -207,7 +207,9 @@ Em.I18n.translations = {
   'common.stacks': 'Stacks',
   'common.stack': 'Stack',
   'common.reset': 'Reset',
+  'common.resume': 'Resume',
   'common.path': 'Path',
+  'common.patch': 'Patch',
   'common.package': 'Package',
   'common.proceed': 'Proceed',
   'common.proceedAnyway': 'Proceed Anyway',
@@ -1522,6 +1524,9 @@ Em.I18n.translations = {
   'admin.stackVersions.version.upgrade.upgradeOptions.EU.confirm.msg': 'You are about to perform an <b>Express Upgrade</b> from <b>{0}</b> to <b>{1}</b>. This will incur cluster downtime. Are you sure you want to proceed?',
   'admin.stackVersions.version.upgrade.upgradeOptions.RU.confirm.msg': 'You are about to perform a <b>Rolling Upgrade</b> from <b>{0}</b> to <b>{1}</b>. Are you sure you want to proceed?',
 
+  'admin.stackVersions.version.column.showDetails': "Show Details",
+  'admin.stackVersions.version.column.showDetails.title': "Version Details",
+
   'admin.stackVersions.hosts.popup.header.current': "Current",
   'admin.stackVersions.hosts.popup.header.installed': "Installed",
   'admin.stackVersions.hosts.popup.header.not_installed': "Not installed",

http://git-wip-us.apache.org/repos/asf/ambari/blob/1e89d1d5/ambari-web/app/models.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models.js b/ambari-web/app/models.js
index 1f0a0ab..01fe689 100644
--- a/ambari-web/app/models.js
+++ b/ambari-web/app/models.js
@@ -28,6 +28,7 @@ require('models/stack');
 require('models/stack_version/version');
 require('models/stack_version/repository_version');
 require('models/stack_version/os');
+require('models/stack_version/service_simple');
 require('models/stack_version/repository');
 require('models/operating_system');
 require('models/repository');

http://git-wip-us.apache.org/repos/asf/ambari/blob/1e89d1d5/ambari-web/app/models/stack_version/repository_version.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/stack_version/repository_version.js b/ambari-web/app/models/stack_version/repository_version.js
index bea2d89..e02999f 100644
--- a/ambari-web/app/models/stack_version/repository_version.js
+++ b/ambari-web/app/models/stack_version/repository_version.js
@@ -20,13 +20,18 @@ var App = require('app');
 
 App.RepositoryVersion = DS.Model.extend({
   displayName: DS.attr('string'),
+  type: DS.attr('string'), // "PATCH" or "STANDARD" (future: "SERVICE")
   repositoryVersion: DS.attr('string'),
   upgradePack: DS.attr('string'),
   stackVersionType: DS.attr('string'),
   stackVersionNumber: DS.attr('string'),
   operatingSystems: DS.hasMany('App.OS'),
+  services: DS.hasMany('App.ServiceSimple'),
   stackVersion: DS.belongsTo('App.StackVersion'),
   stack: Em.computed.concat(' ', 'stackVersionType', 'stackVersionNumber'),
+  displayNameSimple: function() {
+    return this.get('stackVersionType') + '-' + this.get('repositoryVersion').split('-')[0];
+  }.property('stackVersionType', 'repositoryVersion'),
 
   /**
    * status used until corresponding stack version get created
@@ -105,4 +110,3 @@ App.RepositoryVersion = DS.Model.extend({
 });
 
 App.RepositoryVersion.FIXTURES = [];
-
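
As a quick illustration of the new displayNameSimple computed property: it joins the stack type with the portion of repositoryVersion before the build number. The same string logic outside of Ember, using values from the version.json fixture later in this series:

// Stand-alone sketch of the displayNameSimple computation added above.
function displayNameSimple(stackVersionType, repositoryVersion) {
  // "2.3.6.0-3509".split('-')[0] === "2.3.6.0"
  return stackVersionType + '-' + repositoryVersion.split('-')[0];
}

console.log(displayNameSimple('HDP', '2.3.6.0-3509')); // "HDP-2.3.6.0"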

http://git-wip-us.apache.org/repos/asf/ambari/blob/1e89d1d5/ambari-web/app/models/stack_version/service_simple.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/stack_version/service_simple.js b/ambari-web/app/models/stack_version/service_simple.js
new file mode 100644
index 0000000..3f4b36d
--- /dev/null
+++ b/ambari-web/app/models/stack_version/service_simple.js
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var App = require('app');
+
+App.ServiceSimple = DS.Model.extend({
+  id: DS.attr('string'),
+  name: DS.attr('string'),
+  displayName: DS.attr('string'),
+  latestVersion: DS.attr('string')
+});
+
+App.ServiceSimple.FIXTURES = [];
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/1e89d1d5/ambari-web/app/styles/stack_versions.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/stack_versions.less b/ambari-web/app/styles/stack_versions.less
index ab27a84..5490592 100644
--- a/ambari-web/app/styles/stack_versions.less
+++ b/ambari-web/app/styles/stack_versions.less
@@ -127,8 +127,43 @@
     }
   }
 }
+.version-box-popup {
+  .modal{
+    max-height: 563px;
+    max-width: 340px;
+    margin: -250px 0 0 -180px;
+  }
+  .version-box-in-popup.version-box {
+    max-width: 300px;
+    .hosts-section {
+      padding-bottom: 16px;
+    }
+  }
+}
 
-#stack-upgrade-page {
+#stack-upgrade-page,
+.version-box-popup {
+  .versions-slides {
+    overflow-x: scroll;
+    padding-bottom: 25px;
+    margin-left: 0px;
+    margin-top: 5px;
+    background-color: #f5f5f5;
+    border: 2px solid #f5f5f5;
+    border-left: none;
+    .versions-slides-bar {
+      min-width: 1000px;
+      max-width: 20000px;
+    }
+  }
+  .left-menu-table {
+    padding: 10px 1px;
+    margin-top: 87px;
+    td.service-display-name {
+      border: 2px solid #e4e4e4;
+      border-left: none;
+    }
+  }
   div.current-version-box {
     background: none repeat scroll 0 0 #e6f1f6;
     box-shadow: 0 8px 6px -6px black;
@@ -226,6 +261,81 @@
       font-size: 12px;
     }
   }
+  .version-column {
+    min-height: 500px;
+    width: 115px;
+    margin: 15px 7px 0px 7px;
+    .version-column-display-name {
+      color: #000;
+      text-align: center;
+      font-weight: 500;
+      font-size: 16px;
+      line-height: 18px;
+      padding: 5px 5px;
+      margin-bottom: 0px;
+    }
+    .version-column-show-details {
+      text-align: center;
+      font-size: 13px;
+      padding: 5px 5px;
+      border-bottom: 1px solid #e4e4e4;
+      margin-bottom: 0px;
+      a {
+        cursor: pointer;
+      }
+    }
+    .state {
+      margin: 5px 0px 15px 0px;
+      line-height: 20px;
+      .label-success {
+        line-height: 30px;
+      }
+      .btn.btn-primary:not(.dropdown-toggle) {
+        padding: 4px 5px;
+        min-width: 70px;
+      }
+      .open .dropdown-menu {
+        min-width: 90px;
+      }
+    }
+    .state.installing {
+      padding-bottom: 5px;
+      margin: 10px 0px;
+    }
+    .services-section {
+      width: 100%;
+      text-align: center;
+      padding-bottom: 4px;
+      .service-version-info {
+        margin-bottom: 16px;
+        span.label {
+          padding: 5px 15px;
+          background-color: #a3a3a3;
+          z-index: 100;
+          position: relative;
+        }
+      }
+      .line-separator{
+        height: 2px;
+        background: #e4e4e4;
+        position: relative;
+        top: -25px;
+        margin: 0px -15px 0px -15px;
+      }
+
+    }
+  }
+  div.current-version-box.version-column {
+    .services-section{
+      .service-version-info {
+        span.label {
+          background-color: #468847;
+        }
+      }
+    }
+
+
+  }
 }
 
 #stack-upgrade-dialog {

http://git-wip-us.apache.org/repos/asf/ambari/blob/1e89d1d5/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_box.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_box.hbs b/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_box.hbs
index 70a0738..84d7835 100644
--- a/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_box.hbs
+++ b/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_box.hbs
@@ -27,6 +27,10 @@
   </a>
 </p>
 <p class="repository-name">({{view.content.repositoryVersion}})</p>
+{{#if view.isPatch}}
+  <p class="patch-icon"></p>
+{{/if}}
+
 <div {{bindAttr class="view.stateElement.isInstalling:installing :align-center :state"}}>
   {{#if view.stateElement.isButton}}
     <button class="btn btn-primary"

http://git-wip-us.apache.org/repos/asf/ambari/blob/1e89d1d5/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_column.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_column.hbs b/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_column.hbs
new file mode 100644
index 0000000..c343151
--- /dev/null
+++ b/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_column.hbs
@@ -0,0 +1,86 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{#if view.isOutOfSync}}
+  <i class="icon-exclamation-sign out-of-sync-badge"></i>
+{{/if}}
+
+<p class="version-column-display-name">
+  <span>{{view.content.displayNameSimple}}</span>
+</p>
+
+<p class="version-column-show-details">
+  <a {{action openVersionBoxPopup target="view"}}>{{t admin.stackVersions.version.column.showDetails}}</a>
+</p>
+<div {{bindAttr class="view.stateElement.isInstalling:installing :align-center :state"}}>
+  {{#if view.stateElement.isButton}}
+    <button class="btn btn-primary"
+      {{action runAction target="view"}}
+      {{bindAttr disabled="view.stateElement.isDisabled"}}>
+      {{view.stateElement.text}}
+    </button>
+ {{/if}}
+  {{#if view.stateElement.isButtonGroup}}
+    <div class="btn-group">
+      <button class="btn btn-primary"
+        {{action runAction target="view"}}
+        {{bindAttr disabled="view.stateElement.isDisabled"}}>
+        {{view.stateElement.text}}
+      </button>
+      <button class="btn btn-primary dropdown-toggle" data-toggle="dropdown" {{bindAttr disabled="view.stateElement.isDisabled"}}>
+        <span class="caret"></span>
+      </button>
+      <ul class="dropdown-menu">
+        {{#each button in view.stateElement.buttons}}
+          <li {{bindAttr class="button.isDisabled:disabled"}}>
+            <a {{action runAction button.action target="view"}}>
+              {{button.text}}
+            </a>
+          </li>
+        {{/each}}
+      </ul>
+    </div>
+  {{/if}}
+  {{#if view.stateElement.isLabel}}
+      <span {{bindAttr class="view.stateElement.class"}}>{{view.stateElement.text}}</span>
+  {{/if}}
+  {{#if view.stateElement.isLink}}
+    {{#if view.stateElement.iconClass}}
+        <i {{bindAttr class="view.stateElement.iconClass"}}></i>
+    {{/if}}
+      <a href="#" {{action runAction target="view"}}>{{view.stateElement.text}}</a>
+    {{#if view.stateElement.isInstalling}}
+      {{view App.ProgressBarView
+          progressBinding="view.installProgress"
+          statusBinding="view.PROGRESS_STATUS"
+          }}
+    {{/if}}
+  {{/if}}
+  {{#if view.stateElement.isSpinner}}
+    {{view App.SpinnerView}}
+  {{/if}}
+</div>
+
+<div class="services-section">
+  {{#each service in view.services}}
+    <p class="service-version-info">
+       <span {{bindAttr class="service.isVersionInvisible:invisible :label"}}>{{service.latestVersion}}</span>
+    </p>
+    <div class="line-separator"></div>
+  {{/each}}
+</div>
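
The class binding on the version label above ({{bindAttr class="service.isVersionInvisible:invisible :label"}}) always applies the static "label" class and adds "invisible" only when the flag is truthy. A rough, framework-free approximation of that resolution (Ember's real bindAttr does considerably more; this only shows how the syntax reads):

// "cond:trueClass" contributes trueClass when cond is truthy; ":static" is always added.
function resolveVersionLabelClasses(service) {
  var classes = ['label'];
  if (service.isVersionInvisible) {
    classes.push('invisible');
  }
  return classes.join(' ');
}

console.log(resolveVersionLabelClasses({ latestVersion: '2.7.1', isVersionInvisible: false })); // "label"
console.log(resolveVersionLabelClasses({ latestVersion: '',      isVersionInvisible: true  })); // "label invisible"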

http://git-wip-us.apache.org/repos/asf/ambari/blob/1e89d1d5/ambari-web/app/templates/main/admin/stack_upgrade/versions.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/stack_upgrade/versions.hbs b/ambari-web/app/templates/main/admin/stack_upgrade/versions.hbs
index b14e5bc..6e2b057 100644
--- a/ambari-web/app/templates/main/admin/stack_upgrade/versions.hbs
+++ b/ambari-web/app/templates/main/admin/stack_upgrade/versions.hbs
@@ -16,23 +16,22 @@
 * limitations under the License.
 }}
 
-
-<div>
+<div id="versions-filter-section">
   <div class="btn-group display-inline-block">
     <a class="btn dropdown-toggle" data-toggle="dropdown" href="#">
       <span class="filters-label">{{t common.filter}}: </span>
-            <span>
-              {{view.selectedCategory.label}}
-              <span class="caret"></span>
-            </span>
+        <span>
+          {{view.selectedCategory.label}}
+          <span class="caret"></span>
+        </span>
     </a>
     <ul class="dropdown-menu">
       {{#each category in view.categories}}
-          <li>
-            <a {{action selectCategory category target="view"}} href="#">
-              {{category.label}}
-            </a>
-          </li>
+        <li>
+          <a {{action selectCategory category target="view"}} href="#">
+            {{category.label}}
+          </a>
+        </li>
       {{/each}}
     </ul>
   </div>
@@ -42,14 +41,32 @@
     </button>
   {{/isAuthorized}}
 </div>
-<div class="row-fluid">
-  {{#if isLoaded}}
-    {{#each version in view.repoVersions}}
-      {{#if version.isVisible}}
-        {{view App.UpgradeVersionBoxView contentBinding="version"}}
-      {{/if}}
-    {{/each}}
-  {{else}}
-    {{view App.SpinnerView}}
-  {{/if}}
-</div>
+<div id="versions-section" class="row-fluid">
+  <div class="span2 left-menu-table">
+    <table class="table">
+      <thead>
+        <tr><th>{{t common.default}}</th></tr>
+      </thead>
+      <tbody>
+        {{#each service in view.services}}
+          <tr>
+            <td class="service-display-name">{{service.displayName}}</td>
+          </tr>
+        {{/each}}
+      </tbody>
+    </table>
+  </div>
+  <div class="span10 versions-slides">
+    {{#if isLoaded}}
+      <div class="versions-slides-bar">
+        {{#each version in view.repoVersions}}
+          {{#if version.isVisible}}
+            {{view App.UpgradeVersionColumnView contentBinding="version"}}
+          {{/if}}
+        {{/each}}
+      </div>
+    {{else}}
+      {{view App.SpinnerView}}
+    {{/if}}
+  </div>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/1e89d1d5/ambari-web/app/utils/host_progress_popup.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/host_progress_popup.js b/ambari-web/app/utils/host_progress_popup.js
index 726a96a..6dd4558 100644
--- a/ambari-web/app/utils/host_progress_popup.js
+++ b/ambari-web/app/utils/host_progress_popup.js
@@ -482,8 +482,8 @@ App.HostPopup = Em.Object.create({
       barColor: status[2],
       isInProgress: status[3],
       barWidth: "width:" + service.progress + "%;",
-      sourceRequestScheduleId: service.get('sourceRequestScheduleId'),
-      contextCommand: service.get('contextCommand')
+      sourceRequestScheduleId: service.sourceRequestScheduleId,
+      contextCommand: service.contextCommand
     });
   },
 

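The host_progress_popup.js change above swaps service.get(...) for plain property access, presumably because the object passed in here is a plain JavaScript object literal rather than an Em.Object, so .get() is not available on it. A tiny illustration (the values are made up):

// Plain objects expose properties directly; calling .get() on them throws.
var plainService = { sourceRequestScheduleId: 11, contextCommand: 'SOME_COMMAND' };
console.log(plainService.sourceRequestScheduleId); // 11
// plainService.get('sourceRequestScheduleId');    // TypeError: plainService.get is not a function
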
http://git-wip-us.apache.org/repos/asf/ambari/blob/1e89d1d5/ambari-web/app/views.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views.js b/ambari-web/app/views.js
index 2440086..4b6b857 100644
--- a/ambari-web/app/views.js
+++ b/ambari-web/app/views.js
@@ -181,6 +181,7 @@ require('views/main/admin/highAvailability/rangerAdmin/step4_view');
 require('views/main/admin/serviceAccounts_view');
 require('views/main/admin/stack_upgrade/upgrade_wizard_view');
 require('views/main/admin/stack_upgrade/upgrade_version_box_view');
+require('views/main/admin/stack_upgrade/upgrade_version_column_view');
 require('views/main/admin/stack_upgrade/upgrade_group_view');
 require('views/main/admin/stack_upgrade/upgrade_task_view');
 require('views/main/admin/stack_upgrade/services_view');

http://git-wip-us.apache.org/repos/asf/ambari/blob/1e89d1d5/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
index f8dd4f2..c6a1960 100644
--- a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
+++ b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
@@ -171,18 +171,18 @@ App.UpgradeVersionBoxView = Em.View.extend({
         var isDisabled = !App.isAuthorized('CLUSTER.UPGRADE_DOWNGRADE_STACK') || this.get('controller.requestInProgress') || isInstalling;
         element.set('isButtonGroup', true);
         if (status === 'OUT_OF_SYNC') {
-          element.set('text', Em.I18n.t('admin.stackVersions.version.reinstall'));
+          element.set('text', this.get('isVersionColumnView') ? Em.I18n.t('common.reinstall') : Em.I18n.t('admin.stackVersions.version.reinstall'));
           element.set('action', 'installRepoVersionConfirmation');
           element.get('buttons').pushObject({
-            text: Em.I18n.t('admin.stackVersions.version.performUpgrade'),
+            text: this.get('isVersionColumnView') ? Em.I18n.t('common.upgrade') : Em.I18n.t('admin.stackVersions.version.performUpgrade'),
             action: 'confirmUpgrade',
             isDisabled: isDisabled
           });
         } else {
-          element.set('text', Em.I18n.t('admin.stackVersions.version.performUpgrade'));
+          element.set('text', this.get('isVersionColumnView') ? Em.I18n.t('common.upgrade') : Em.I18n.t('admin.stackVersions.version.performUpgrade'));
           element.set('action', 'confirmUpgrade');
           element.get('buttons').pushObject({
-            text: Em.I18n.t('admin.stackVersions.version.reinstall'),
+            text: this.get('isVersionColumnView') ? Em.I18n.t('common.reinstall') : Em.I18n.t('admin.stackVersions.version.reinstall'),
             action: 'installRepoVersionConfirmation',
             isDisabled: isDisabled
           });
@@ -217,7 +217,8 @@ App.UpgradeVersionBoxView = Em.View.extend({
     }
     else if (isAborted) {
       element.setProperties(statePropertiesMap['SUSPENDED']);
-      element.set('text', this.get('controller.isDowngrade') ? Em.I18n.t('admin.stackUpgrade.dialog.resume.downgrade') : Em.I18n.t('admin.stackUpgrade.dialog.resume'));
+      var text = this.get('controller.isDowngrade') ? Em.I18n.t('admin.stackUpgrade.dialog.resume.downgrade') : Em.I18n.t('admin.stackUpgrade.dialog.resume');
+      element.set('text', this.get('isVersionColumnView') ? Em.I18n.t('common.resume'): text);
       element.set('isDisabled', this.get('controller.requestInProgress'));
     }
     return element;
@@ -243,6 +244,10 @@ App.UpgradeVersionBoxView = Em.View.extend({
     }, 1000);
   },
 
+  isPatch: function() {
+    return this.get('content.type') == "PATCH";
+  }.property('content.type'),
+
   /**
    * run custom action of controller
    */
@@ -274,7 +279,7 @@ App.UpgradeVersionBoxView = Em.View.extend({
 
     return stackVersion; 
   },
-  
+
   /**
    * show popup with repositories to edit
    * @return {App.ModalPopup}

http://git-wip-us.apache.org/repos/asf/ambari/blob/1e89d1d5/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js
new file mode 100644
index 0000000..d7e26e2
--- /dev/null
+++ b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var App = require('app');
+var stringUtils = require('utils/string_utils');
+
+App.UpgradeVersionColumnView = App.UpgradeVersionBoxView.extend({
+  templateName: require('templates/main/admin/stack_upgrade/upgrade_version_column'),
+  isVersionColumnView: true,
+  classNames: ['version-column'],
+
+  didInsertElement: function () {
+    App.tooltip($('.link-tooltip'), {title: Em.I18n.t('admin.stackVersions.version.linkTooltip')});
+    App.tooltip($('.out-of-sync-badge'), {title: Em.I18n.t('hosts.host.stackVersions.status.out_of_sync')});
+  },
+
+  services: function() {
+    var repoRecord = App.RepositoryVersion.find(this.get('content.id'));
+    var originalServices = repoRecord.get('services');
+    // sort the services in the same order as the service menu
+    var sorted = App.Service.find().map(function (service) {
+      var latestVersion = '';
+      if (originalServices.someProperty('name', service.get('serviceName'))){
+        latestVersion = originalServices.filterProperty('name', service.get('serviceName'))[0].get('latestVersion');
+      }
+      return Em.Object.create({
+        displayName: service.get('displayName'),
+        name: service.get('serviceName'),
+        latestVersion: latestVersion,
+        isVersionInvisible: latestVersion == false
+      });
+    });
+    return sorted;
+  }.property(),
+
+  /**
+   * map of properties which correspond to particular state of Upgrade version
+   * @type {object}
+   */
+  statePropertiesMap: {
+    'CURRENT': {
+      isLabel: true,
+      text: Em.I18n.t('common.current'),
+      class: 'label label-success'
+    },
+    'INIT': {
+      isButton: true,
+      text: Em.I18n.t('common.install'),
+      action: 'installRepoVersionConfirmation'
+    },
+    'LOADING': {
+      isSpinner: true,
+      class: 'spinner'
+    },
+    'INSTALLING': {
+      iconClass: 'icon-cog',
+      isLink: true,
+      text: Em.I18n.t('hosts.host.stackVersions.status.installing'),
+      action: 'showProgressPopup'
+    },
+    'INSTALLED': {
+      iconClass: 'icon-ok',
+      isLink: true,
+      text: Em.I18n.t('common.installed'),
+      action: null
+    },
+    'SUSPENDED': {
+      isButton: true,
+      text: Em.I18n.t('admin.stackUpgrade.dialog.resume'),
+      action: 'resumeUpgrade'
+    }
+  },
+
+  openVersionBoxPopup: function (event) {
+    var content = this.get('content');
+    var parentView = this.get('parentView');
+
+    return App.ModalPopup.show({
+      classNames: ['version-box-popup'],
+      bodyClass: App.UpgradeVersionBoxView.extend({
+        classNames: ['version-box-in-popup'],
+        content: content,
+        parentView: parentView
+      }),
+      header: Em.I18n.t('admin.stackVersions.version.column.showDetails.title'),
+      primary: Em.I18n.t('common.dismiss'),
+      secondary: null
+    });
+  }
+});
+
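
The services computed property above walks the installed services (so the column keeps the same order as the service menu) and looks up each one's latest version from the repository version record. A framework-free sketch of that lookup; the input shapes and names are illustrative, and the emptiness check is written explicitly (the original relies on '' == false being true):

// Illustrative stand-in for the services() lookup in UpgradeVersionColumnView.
function buildColumnServices(installedServices, repoServices) {
  return installedServices.map(function (service) {
    var match = repoServices.filter(function (s) { return s.name === service.serviceName; })[0];
    var latestVersion = match ? match.latestVersion : '';
    return {
      displayName: service.displayName,
      name: service.serviceName,
      latestVersion: latestVersion,
      isVersionInvisible: latestVersion === ''   // hides the version badge when the repo does not ship this service
    };
  });
}

var installed = [{ serviceName: 'HDFS', displayName: 'HDFS' }, { serviceName: 'YARN', displayName: 'YARN' }];
var inRepo    = [{ name: 'HDFS', latestVersion: '2.7.1' }];
console.log(buildColumnServices(installed, inRepo));
// -> HDFS carries "2.7.1"; YARN has latestVersion '' and isVersionInvisible true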

http://git-wip-us.apache.org/repos/asf/ambari/blob/1e89d1d5/ambari-web/app/views/main/admin/stack_upgrade/versions_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/stack_upgrade/versions_view.js b/ambari-web/app/views/main/admin/stack_upgrade/versions_view.js
index d535300..6b20247 100644
--- a/ambari-web/app/views/main/admin/stack_upgrade/versions_view.js
+++ b/ambari-web/app/views/main/admin/stack_upgrade/versions_view.js
@@ -31,6 +31,11 @@ App.MainAdminStackVersionsView = Em.View.extend({
   updateTimer: null,
 
   /**
+   * @type {Array}
+   */
+  services: App.Service.find(),
+
+  /**
    * Not Installed = the version is not installed or out of sync
    * Upgrade Ready = the version is installed and ready for upgrade
    * Current = the version currently being used


[16/50] [abbrv] ambari git commit: AMBARI-14834. Register Version: make the 'Repositories' category editable and addable.(XIWANG)

Posted by nc...@apache.org.
AMBARI-14834. Register Version: make the 'Repositories' category editable and addable.(XIWANG)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/efc3a07d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/efc3a07d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/efc3a07d

Branch: refs/heads/trunk
Commit: efc3a07da28857221e21592a6b62210b41dffdbb
Parents: 0b47564
Author: Xi Wang <xi...@apache.org>
Authored: Wed Feb 3 16:07:04 2016 -0800
Committer: Xi Wang <xi...@apache.org>
Committed: Wed Feb 3 16:07:04 2016 -0800

----------------------------------------------------------------------
 .../app/assets/data/version/version.json        |  83 ++----
 .../stackVersions/StackVersionsCreateCtrl.js    | 281 +++++++++----------
 .../ui/admin-web/app/scripts/i18n.config.js     |   2 +-
 .../ui/admin-web/app/scripts/services/Stack.js  | 110 ++------
 .../resources/ui/admin-web/app/styles/main.css  |   8 +
 .../views/stackVersions/stackVersionPage.html   |  40 ++-
 6 files changed, 218 insertions(+), 306 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/efc3a07d/ambari-admin/src/main/resources/ui/admin-web/app/assets/data/version/version.json
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/assets/data/version/version.json b/ambari-admin/src/main/resources/ui/admin-web/app/assets/data/version/version.json
index d6530e2..95c063a 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/assets/data/version/version.json
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/assets/data/version/version.json
@@ -1,26 +1,26 @@
 {
-  "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions?fields=repository_versions/operatingSystems/repositories/*&repository_versions/RepositoryVersions/repository_version=2.2.0.1-901",
+  "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions?fields=repository_versions/operatingSystems/repositories/*&repository_versions/RepositoryVersions/repository_version=2.3.6.0-3509",
   "items" : [
     {
-      "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2",
+      "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.3",
       "Versions" : {
         "stack_name" : "HDP",
-        "stack_version" : "2.2"
+        "stack_version" : "2.3"
       },
       "repository_versions" : [
         {
-          "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15",
+          "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.3/repository_versions/15",
           "RepositoryVersions" : {
             "id" : 15,
-            "repository_version" : "2.2.0.1-901",
+            "repository_version" : "2.3.6.0-3509",
             "stack_name" : "HDP",
-            "stack_version" : "2.2",
+            "stack_version" : "2.3",
+            "type": "PATCH",
             "release": {
-              "type": "PATCH",
-              "stack_id": "HDP-2.2",
-              "version": "2.2.0.1",
-              "build": "901",
-              "compatible_with": "2.2.0.1-[1-9]",
+              "stack_id": "HDP-2.3",
+              "version": "2.3.6.0",
+              "build": "3509",
+              "compatible_with": "2.3.6.0-[1-9]",
               "release_notes": "http://someurl"
             },
             "services": [
@@ -57,83 +57,42 @@
           },
           "operating_systems" : [
             {
-              "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15/operating_systems/redhat5",
-              "OperatingSystems" : {
-                "os_type" : "redhat5",
-                "repository_version_id" : 15,
-                "stack_name" : "HDP",
-                "stack_version" : "2.2"
-              },
-              "repositories" : [
-                {
-                  "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15/operating_systems/redhat5/repositories/HDP-2.2.0.1-901",
-                  "Repositories" : {
-                    "base_url" : "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0",
-                    "default_base_url" : "",
-                    "latest_base_url" : "",
-                    "mirrors_list" : "",
-                    "os_type" : "redhat5",
-                    "repo_id" : "HDP-2.2.0.1-901",
-                    "repo_name" : "HDP",
-                    "repository_version_id" : 15,
-                    "stack_name" : "HDP",
-                    "stack_version" : "2.2"
-                  }
-                },
-                {
-                  "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15/operating_systems/redhat5/repositories/HDP-UTILS-2.2.0.1-901",
-                  "Repositories" : {
-                    "base_url" : "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0",
-                    "default_base_url" : "",
-                    "latest_base_url" : "",
-                    "mirrors_list" : "",
-                    "os_type" : "redhat5",
-                    "repo_id" : "HDP-UTILS-2.2.0.1-901",
-                    "repo_name" : "HDP-UTILS",
-                    "repository_version_id" : 15,
-                    "stack_name" : "HDP",
-                    "stack_version" : "2.2"
-                  }
-                }
-              ]
-            },
-            {
-              "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15/operating_systems/redhat6",
+              "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.3/repository_versions/15/operating_systems/redhat6",
               "OperatingSystems" : {
                 "os_type" : "redhat6",
                 "repository_version_id" : 15,
                 "stack_name" : "HDP",
-                "stack_version" : "2.2"
+                "stack_version" : "2.3"
               },
               "repositories" : [
                 {
-                  "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15/operating_systems/redhat6/repositories/HDP-2.2.0.1-901",
+                  "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.3/repository_versions/15/operating_systems/redhat6/repositories/HDP-2.3.6.0-3509",
                   "Repositories" : {
-                    "base_url" : "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0",
+                    "base_url" : "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.3.6.0-3509",
                     "default_base_url" : "",
                     "latest_base_url" : "",
                     "mirrors_list" : "",
                     "os_type" : "redhat6",
-                    "repo_id" : "HDP-2.2.0.1-901",
+                    "repo_id" : "HDP-2.3.6.0-3509",
                     "repo_name" : "HDP",
                     "repository_version_id" : 15,
                     "stack_name" : "HDP",
-                    "stack_version" : "2.2"
+                    "stack_version" : "2.3"
                   }
                 },
                 {
-                  "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15/operating_systems/redhat6/repositories/HDP-UTILS-2.2.0.1-901",
+                  "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.3/repository_versions/15/operating_systems/redhat6/repositories/HDP-UTILS-2.3.6.0-3509",
                   "Repositories" : {
-                    "base_url" : "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0",
+                    "base_url" : "http://s3.amazonaws.com/dev.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6",
                     "default_base_url" : "",
                     "latest_base_url" : "",
                     "mirrors_list" : "",
                     "os_type" : "redhat6",
-                    "repo_id" : "HDP-UTILS-2.2.0.1-901",
+                    "repo_id" : "HDP-UTILS-2.3.6.0-3509",
                     "repo_name" : "HDP-UTILS",
                     "repository_version_id" : 15,
                     "stack_name" : "HDP",
-                    "stack_version" : "2.2"
+                    "stack_version" : "2.3"
                   }
                 }
               ]

http://git-wip-us.apache.org/repos/asf/ambari/blob/efc3a07d/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
index 81ad7f7..37f9c34 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
@@ -23,147 +23,26 @@ angular.module('ambariAdminConsole')
   $scope.createController = true;
   $scope.osList = [];
   $scope.skipValidation = false;
-  $scope.selectedOS = 0;
   $scope.repoSubversion = "";
 
   $scope.clusterName = $routeParams.clusterName;
   $scope.subversionPattern = /^\d+\.\d+(-\d+)?$/;
   $scope.upgradeStack = {
-    selected: null,
-    options: []
-  };
-  $scope.fetchStackVersionFilterList = function () {
-    return Stack.allStackVersions()
-    .then(function (allStackVersions) {
-      var versions = [];
-      angular.forEach(allStackVersions, function (version) {
-        if (version.upgrade_packs.length > 0 && version.active) {
-          versions.push(version);
-        }
-      });
-      $scope.upgradeStack.options = versions;
-      $scope.upgradeStack.selected = versions[versions.length - 1];
-      $scope.afterStackVersionChange();
-    })
-    .catch(function (data) {
-      Alert.error($t('versions.alerts.filterListError'), data.message);
-    });
-  };
-  $scope.fetchStackVersionFilterList();
-
-  $scope.save = function () {
-    return Stack.validateBaseUrls($scope.skipValidation, $scope.osList, $scope.upgradeStack.selected).then(function (invalidUrls) {
-      if (invalidUrls.length === 0) {
-        Stack.addRepo($scope.upgradeStack.selected, $scope.repoSubversion, $scope.osList)
-          .success(function () {
-            var versionName = $scope.upgradeStack.selected.stack_version + '.' + $scope.repoSubversion;
-            var stackName = $scope.upgradeStack.selected.stack_name;
-            Alert.success($t('versions.alerts.versionCreated'), {stackName: stackName, versionName: versionName});
-            $location.path('/stackVersions');
-          })
-          .error(function (data) {
-              Alert.error($t('versions.alerts.versionCreationError'), data.message);
-          });
-      } else {
-        Stack.highlightInvalidUrls(invalidUrls);
-      }
-    });
+    stack_name: '',
+    stack_version: '',
+    display_name: ''
   };
 
-  $scope.afterStackVersionChange = function () {
-    Stack.getSupportedOSList($scope.upgradeStack.selected.stack_name, $scope.upgradeStack.selected.stack_version)
-    .then(function (data) {
-      var operatingSystems = data.operating_systems;
-        $scope.osList = operatingSystems.map(function (os) {
-          os.selected = false;
-          os.repositories.forEach(function(repo) {
-            repo.Repositories.base_url = '';
-          });
-          return os;
-        });
-    })
-    .catch(function (data) {
-      Alert.error($t('versions.alerts.osListError'), data.message);
-    });
-  };
-
-  $scope.updateCurrentVersionInput = function () {
-    $scope.currentVersionInput = $scope.upgradeStack.selected.displayName + '.' + angular.element('[name="version"]')[0].value;
-  };
-
-  /**
-   * TODO create parent controller for StackVersionsEditCtrl and StackVersionsCreateCtrl and
-   * move this method to it
-   */
-  $scope.clearErrors = function() {
-    if ($scope.osList) {
-      $scope.osList.forEach(function(os) {
-        if (os.repositories) {
-          os.repositories.forEach(function(repo) {
-            repo.hasError = false;
-          })
-        }
-      });
-    }
-  };
-  /**
-   * TODO create parent controller for StackVersionsEditCtrl and StackVersionsCreateCtrl and
-   * move this method to it
-   */
-  $scope.clearError = function() {
-    this.repository.hasError = false;
-  };
-  /**
-   * TODO create parent controller for StackVersionsEditCtrl and StackVersionsCreateCtrl and
-   * move this method to it
-   */
-  $scope.toggleOSSelect = function () {
-    this.os.repositories.forEach(function(repo) {
-      repo.hasError = false;
-    });
-    this.os.selected ? $scope.selectedOS++ : $scope.selectedOS--;
-  };
-  /**
-   * TODO create parent controller for StackVersionsEditCtrl and StackVersionsCreateCtrl and
-   * move this method to it
-   */
-  $scope.hasValidationErrors = function() {
-    var hasErrors = false;
-    if ($scope.osList) {
-      $scope.osList.forEach(function(os) {
-        if (os.repositories) {
-          os.repositories.forEach(function(repo) {
-            if (repo.hasError) {
-              hasErrors = true;
-            }
-          })
-        }
-      });
-    }
-    return hasErrors;
-  };
-  /**
-   * TODO create parent controller for StackVersionsEditCtrl and StackVersionsCreateCtrl and
-   * move this method to it
-   */
-  $scope.cancel = function () {
-    $scope.editVersionDisabled = true;
-    $location.path('/stackVersions');
-  };
-
-  // two options to upload version definition file
   $scope.option1 = {
     index: 1,
     displayName: 'Upload Version Definition File',
     url: 'files://',
-    //selected: true,
     hasError: false
   };
   $scope.option2 = {
     index: 2,
     displayName: 'Version Definition File URL',
     url: 'https://',
-    //selected: false,
     hasError: false
   };
   $scope.selectedOption = 1;
@@ -172,8 +51,6 @@ angular.module('ambariAdminConsole')
    * User can select ONLY one option to upload version definition file
    */
   $scope.toggleOptionSelect = function () {
-    //$scope.option1.selected = $scope.selectedOption == $scope.option1.index;
-    //$scope.option2.selected = $scope.selectedOption == $scope.option2.index;
     $scope.option1.hasError = false;
     $scope.option2.hasError = false;
   };
@@ -219,6 +96,11 @@ angular.module('ambariAdminConsole')
       $scope.displayName = response.displayName || 'n/a';
       $scope.version = response.version || 'n/a';
       $scope.actualVersion = response.actualVersion || 'n/a';
+      $scope.upgradeStack = {
+        stack_name: response.stackName,
+        stack_version: response.stackVersion,
+        display_name: response.displayName
+      };
       $scope.services = response.services || [];
       //save default values of repos to check if they were changed
       $scope.defaulfOSRepos = {};
@@ -229,26 +111,141 @@ angular.module('ambariAdminConsole')
         };
       });
       $scope.repoVersionFullName = response.repoVersionFullName;
+      $scope.selectedOS = [];
       angular.forEach(response.osList, function (os) {
         os.selected = true;
+        $scope.selectedOS.push(os.OperatingSystems.os_type);
       });
-      $scope.selectedOS = response.osList.length;
       $scope.osList = response.osList;
-      // if user reach here from UI click, repo status should be cached
-      // otherwise re-fetch repo status from cluster end point.
-//      $scope.repoStatus = Cluster.repoStatusCache[$scope.id];
-//      if (!$scope.repoStatus) {
-//        $scope.fetchClusters()
-//          .then(function () {
-//            return $scope.fetchRepoClusterStatus();
-//          })
-//          .then(function () {
-//            $scope.deleteEnabled = $scope.isDeletable();
-//          });
-//      } else {
-//        $scope.deleteEnabled = $scope.isDeletable();
-//      }
-      //$scope.addMissingOSList();
+      // load supported OS types based on the stack version
+      $scope.afterStackVersionRead();
     });
   };
+
+  /**
+   * Load supported OS list
+   */
+  $scope.afterStackVersionRead = function () {
+    Stack.getSupportedOSList($scope.upgradeStack.stack_name, $scope.upgradeStack.stack_version)
+      .then(function (data) {
+        var operatingSystems = data.operating_systems;
+        operatingSystems.map(function (os) {
+          // os not in the list, mark as un-selected, add this to the osList
+          if ($scope.selectedOS.indexOf(os.OperatingSystems.os_type) < 0) {
+            os.selected = false;
+            os.repositories.forEach(function(repo) {
+              repo.Repositories.base_url = '';
+            });
+            $scope.osList.push(os);
+          }
+        });
+      })
+      .catch(function (data) {
+        Alert.error($t('versions.alerts.osListError'), data.message);
+      });
+  };
+
+  /**
+   * On click handler for removing OS
+   */
+  $scope.removeOS = function() {
+    this.os.selected = false;
+    if (this.os.repositories) {
+      this.os.repositories.forEach(function(repo) {
+        repo.hasError = false;
+      });
+    }
+  };
+  /**
+   * On click handler for adding new OS
+   */
+  $scope.addOS = function() {
+    this.os.selected = true;
+    if (this.os.repositories) {
+      this.os.repositories.forEach(function(repo) {
+        repo.hasError = false;
+      });
+    }
+  };
+
+  $scope.isSaveButtonDisabled = function() {
+    var enabled = false;
+    $scope.osList.forEach(function(os) {
+      if (os.selected) {
+        enabled = true
+      }
+    });
+    return !enabled;
+  }
+
+  $scope.save = function () {
+    return Stack.validateBaseUrls($scope.skipValidation, $scope.osList, $scope.upgradeStack).then(function (invalidUrls) {
+      if (invalidUrls.length === 0) {
+        Stack.addRepo($scope.upgradeStack, $scope.actualVersion, $scope.osList)
+          .success(function () {
+            var versionName = $scope.actualVersion + '';
+            var stackName = $scope.upgradeStack.stack_name;
+            Alert.success($t('versions.alerts.versionCreated'), {stackName: stackName, versionName: versionName});
+            $location.path('/stackVersions');
+          })
+          .error(function (data) {
+              Alert.error($t('versions.alerts.versionCreationError'), data.message);
+          });
+      } else {
+        Stack.highlightInvalidUrls(invalidUrls);
+      }
+    });
+  };
+  /**
+   * TODO create parent controller for StackVersionsEditCtrl and StackVersionsCreateCtrl and
+   * move this method to it
+   */
+  $scope.cancel = function () {
+    $scope.editVersionDisabled = true;
+    $location.path('/stackVersions');
+  };
+
+  /**
+   * TODO create parent controller for StackVersionsEditCtrl and StackVersionsCreateCtrl and
+   * move this method to it
+   */
+  $scope.clearErrors = function() {
+    if ($scope.osList) {
+      $scope.osList.forEach(function(os) {
+        if (os.repositories) {
+          os.repositories.forEach(function(repo) {
+            repo.hasError = false;
+          })
+        }
+      });
+    }
+  };
+
+  /**
+   * TODO create parent controller for StackVersionsEditCtrl and StackVersionsCreateCtrl and
+   * move this method to it
+   */
+  $scope.clearError = function() {
+    this.repository.hasError = false;
+  };
+
+  /**
+   * TODO create parent controller for StackVersionsEditCtrl and StackVersionsCreateCtrl and
+   * move this method to it
+   */
+  $scope.hasValidationErrors = function() {
+    var hasErrors = false;
+    if ($scope.osList) {
+      $scope.osList.forEach(function(os) {
+        if (os.repositories) {
+          os.repositories.forEach(function(repo) {
+            if (repo.hasError) {
+              hasErrors = true;
+            }
+          })
+        }
+      });
+    }
+    return hasErrors;
+  };
 }]);
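
The reworked controller above reads the stack name, version and display name from the uploaded version definition file, then merges the stack's full list of supported operating systems into $scope.osList in afterStackVersionRead(), marking any OS not already attached to the version as unselected with blank base URLs. A simplified, Angular-free sketch of that merge (names and the redhat7 entry are illustrative):

// Keep the OSes already on the repo version; append the remaining supported ones as unselected.
function mergeSupportedOS(osList, selectedTypes, supportedOS) {
  supportedOS.forEach(function (os) {
    if (selectedTypes.indexOf(os.OperatingSystems.os_type) < 0) {
      os.selected = false;
      os.repositories.forEach(function (repo) { repo.Repositories.base_url = ''; });
      osList.push(os);
    }
  });
  return osList;
}

var current   = [{ OperatingSystems: { os_type: 'redhat6' }, selected: true, repositories: [] }];
var supported = [
  { OperatingSystems: { os_type: 'redhat6' }, repositories: [] },
  { OperatingSystems: { os_type: 'redhat7' }, repositories: [{ Repositories: { base_url: 'http://example' } }] }
];
console.log(mergeSupportedOS(current, ['redhat6'], supported).length); // 2 -> redhat7 added, unselected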

http://git-wip-us.apache.org/repos/asf/ambari/blob/efc3a07d/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
index ed42b45..9f6add0 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
@@ -323,7 +323,7 @@ angular.module('ambariAdminConsole')
       },
 
       'alerts': {
-        'baseURLs': 'Provide Base URLs for the Operating Systems you are configuring. Uncheck all other Operating Systems.',
+        'baseURLs': 'Provide Base URLs for the Operating Systems you are configuring.',
         'validationFailed': 'Some of the repositories failed validation. Make changes to the base url or skip validation if you are sure that urls are correct',
         'skipValidationWarning': '<b>Warning:</b> This is for advanced users only. Use this option if you want to skip validation for Repository Base URLs.',
         'filterListError': 'Fetch stack version filter list error',

http://git-wip-us.apache.org/repos/asf/ambari/blob/efc3a07d/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
index aa81b5d..a12b430 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
@@ -116,11 +116,11 @@ angular.module('ambariAdminConsole')
       return deferred.promise;
     },
 
-    addRepo: function (stack, repoSubversion, osList) {
+    addRepo: function (stack, actualVersion, osList) {
       var url = '/stacks/' + stack.stack_name + '/versions/' + stack.stack_version + '/repository_versions/';
       var payload = {};
       var payloadWrap = { RepositoryVersions : payload };
-      payload.repository_version = stack.stack_version + '.' + repoSubversion;
+      payload.repository_version = actualVersion;
       payload.display_name = stack.stack_name + '-' + payload.repository_version;
       payloadWrap.operating_systems = [];
       osList.forEach(function (osItem) {
@@ -182,25 +182,25 @@ angular.module('ambariAdminConsole')
         .success(function (data) {
           //data = data.items[0];
           data = {
-            "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2",
+            "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.3",
             "Versions" : {
               "stack_name" : "HDP",
-              "stack_version" : "2.2"
+              "stack_version" : "2.3"
             },
             "repository_versions" : [
               {
-                "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15",
+                "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.3/repository_versions/15",
                 "RepositoryVersions" : {
                   "id" : 15,
-                  "repository_version" : "2.2.0.1-901",
+                  "repository_version" : "2.3.6.0-3509",
                   "stack_name" : "HDP",
-                  "stack_version" : "2.2",
+                  "stack_version" : "2.3",
+                  "type": "PATCH",
                   "release": {
-                    "type": "PATCH",
-                    "stack_id": "HDP-2.2",
-                    "version": "2.2.0.1",
-                    "build": "901",
-                    "compatible_with": "2.2.0.1-[1-9]",
+                    "stack_id": "HDP-2.3",
+                    "version": "2.3.6.0",
+                    "build": "3509",
+                    "compatible_with": "2.3.6.0-[1-9]",
                     "release_notes": "http://someurl"
                   },
                   "services": [
@@ -232,112 +232,47 @@ angular.module('ambariAdminConsole')
                           "version": "3.4.5"
                         }
                       ]
-                    },
-                    {
-                      "name": "YARN",
-                      "versions": [
-                        {
-                          "version": "2.7.1"
-                        }
-                      ]
-                    },
-                    {
-                      "name": "SPARK",
-                      "versions": [
-                        {
-                          "version": "1.4.1"
-                        }
-                      ]
-                    },
-                    {
-                      "name": "SPARK",
-                      "versions": [
-                        {
-                          "version": "1.5.2"
-                        }
-                      ]
                     }
                   ]
                 },
                 "operating_systems" : [
                   {
-                    "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15/operating_systems/redhat5",
-                    "OperatingSystems" : {
-                      "os_type" : "redhat5",
-                      "repository_version_id" : 15,
-                      "stack_name" : "HDP",
-                      "stack_version" : "2.2"
-                    },
-                    "repositories" : [
-                      {
-                        "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15/operating_systems/redhat5/repositories/HDP-2.2.0.1-901",
-                        "Repositories" : {
-                          "base_url" : "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0",
-                          "default_base_url" : "",
-                          "latest_base_url" : "",
-                          "mirrors_list" : "",
-                          "os_type" : "redhat5",
-                          "repo_id" : "HDP-2.2.0.1-901",
-                          "repo_name" : "HDP",
-                          "repository_version_id" : 15,
-                          "stack_name" : "HDP",
-                          "stack_version" : "2.2"
-                        }
-                      },
-                      {
-                        "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15/operating_systems/redhat5/repositories/HDP-UTILS-2.2.0.1-901",
-                        "Repositories" : {
-                          "base_url" : "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0",
-                          "default_base_url" : "",
-                          "latest_base_url" : "",
-                          "mirrors_list" : "",
-                          "os_type" : "redhat5",
-                          "repo_id" : "HDP-UTILS-2.2.0.1-901",
-                          "repo_name" : "HDP-UTILS",
-                          "repository_version_id" : 15,
-                          "stack_name" : "HDP",
-                          "stack_version" : "2.2"
-                        }
-                      }
-                    ]
-                  },
-                  {
-                    "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15/operating_systems/redhat6",
+                    "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.3/repository_versions/15/operating_systems/redhat6",
                     "OperatingSystems" : {
                       "os_type" : "redhat6",
                       "repository_version_id" : 15,
                       "stack_name" : "HDP",
-                      "stack_version" : "2.2"
+                      "stack_version" : "2.3"
                     },
                     "repositories" : [
                       {
-                        "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15/operating_systems/redhat6/repositories/HDP-2.2.0.1-901",
+                        "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.3/repository_versions/15/operating_systems/redhat6/repositories/HDP-2.3.6.0-3509",
                         "Repositories" : {
-                          "base_url" : "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0",
+                          "base_url" : "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.3.6.0-3509",
                           "default_base_url" : "",
                           "latest_base_url" : "",
                           "mirrors_list" : "",
                           "os_type" : "redhat6",
-                          "repo_id" : "HDP-2.2.0.1-901",
+                          "repo_id" : "HDP-2.3.6.0-3509",
                           "repo_name" : "HDP",
                           "repository_version_id" : 15,
                           "stack_name" : "HDP",
-                          "stack_version" : "2.2"
+                          "stack_version" : "2.3"
                         }
                       },
                       {
-                        "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15/operating_systems/redhat6/repositories/HDP-UTILS-2.2.0.1-901",
+                        "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.3/repository_versions/15/operating_systems/redhat6/repositories/HDP-UTILS-2.3.6.0-3509",
                         "Repositories" : {
-                          "base_url" : "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0",
+                          "base_url" : "http://s3.amazonaws.com/dev.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6",
                           "default_base_url" : "",
                           "latest_base_url" : "",
                           "mirrors_list" : "",
                           "os_type" : "redhat6",
-                          "repo_id" : "HDP-UTILS-2.2.0.1-901",
+                          "repo_id" : "HDP-UTILS-2.3.6.0-3509",
                           "repo_name" : "HDP-UTILS",
                           "repository_version_id" : 15,
                           "stack_name" : "HDP",
-                          "stack_version" : "2.2"
+                          "stack_version" : "2.3"
                         }
                       }
                     ]
@@ -346,6 +281,7 @@ angular.module('ambariAdminConsole')
               }
             ]
           };
+
           var response = {
             id : data.repository_versions[0].RepositoryVersions.id,
             stackVersion : data.Versions.stack_version,

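The updated Stack.addRepo above now posts the actual version read from the definition file as repository_version, instead of composing it from the stack version and a typed-in subversion. An approximate sketch of the payload it builds for the fixture values in this commit (the operating_systems entries come from the selected osList via code outside this hunk, so only the shape is suggested here):

// Approximate RepositoryVersions payload for HDP 2.3 with actual version 2.3.6.0-3509.
var stack = { stack_name: 'HDP', stack_version: '2.3' };
var actualVersion = '2.3.6.0-3509';

var payload = {
  repository_version: actualVersion,                      // previously stack_version + '.' + repoSubversion
  display_name: stack.stack_name + '-' + actualVersion    // "HDP-2.3.6.0-3509"
};
var payloadWrap = { RepositoryVersions: payload, operating_systems: [] /* filled from the selected OSes */ };

console.log(JSON.stringify(payloadWrap));
// POSTed to /stacks/HDP/versions/2.3/repository_versions/ (from the url built in addRepo)
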
http://git-wip-us.apache.org/repos/asf/ambari/blob/efc3a07d/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
index 8385e48..83d4ffa 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
@@ -1402,6 +1402,14 @@ thead.view-permission-header > tr > th {
   color: #ff4500;
 }
 
+.register-version-form .remove-icon {
+  color: red;
+  margin: 20px 0px;
+  padding: 0px;
+  text-align: center;
+  cursor: pointer;
+}
+
 .register-version-form .version-info {
   padding-top: 7px;
   margin-top: 0;

http://git-wip-us.apache.org/repos/asf/ambari/blob/efc3a07d/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
index 488f077..61d2d8d 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
@@ -69,7 +69,7 @@
       <div class="clearfix">
         <label class="control-label col-sm-3">{{'versions.details.stackName' | translate}}</label>
         <div class="version-info col-sm-7">{{stackNameVersion}}</div>
-        <div class="col-sm-2 patch-icon" ng-if="isPatch"><span class="glyphicon glyphicon-tree-deciduous">Patch</span></div>
+        <div class="col-sm-2 patch-icon" ng-if="isPatch"><span class="glyphicon glyphicon-tree-deciduous"></span>Patch</div>
       </div>
       <div class="clearfix">
         <label class="control-label col-sm-3">{{'versions.details.displayName' | translate}}</label>
@@ -109,22 +109,34 @@
         <div class="name-label-adjust col-sm-2"><h5><label>{{'common.name' | translate}}</label></h5></div>
         <div class="col-sm-7"><h5><label >{{'versions.baseURL' | translate}}</label></h5></div>
       </div>
-      <div class="clearfix border-bottom bottom-margin" ng-repeat="os in osList">
-        <div class="col-sm-2 os-checkbox">
-          <div class="checkbox">
-            <label>
-              <input type="checkbox" ng-model="os.selected" ng-change="toggleOSSelect()"> {{os.OperatingSystems.os_type}}
-            </label>
-          </div>
-        </div>
-        <div class="col-sm-10">
-          <div class="form-group {{repository.Repositories.repo_name}}" ng-class="{'has-error': repository.hasError }" ng-repeat="repository in os.repositories">
-            <div class="col-sm-3"><label class="control-label">{{repository.Repositories.repo_name}}</label></div>
-            <div class="col-sm-9"><input type="text" class="form-control" ng-model="repository.Repositories.base_url"
+      <div class="alert alert-info hide-soft" ng-class="{'visible' : !osList || !osList.length}" role="alert">{{'versions.contents.empty' | translate}}</div>
+      <div class="" ng-repeat="os in osList">
+        <div ng-if="os.selected==true">
+          <div class="clearfix border-bottom bottom-margin">
+            <!-- show selected os in list table-->
+            <div class="col-sm-2">
+              <div class="">
+                <label>{{os.OperatingSystems.os_type}}</label>
+              </div>
+            </div>
+            <div class="col-sm-9">
+              <div class="form-group {{repository.Repositories.repo_name}}" ng-class="{'has-error': repository.hasError }" ng-repeat="repository in os.repositories">
+                <div class="col-sm-3"><label class="control-label">{{repository.Repositories.repo_name}}</label></div>
+                <div class="col-sm-9"><input type="text" class="form-control" ng-model="repository.Repositories.base_url"
                                          ng-change="clearError()" ng-disabled="!os.selected"></div>
+              </div>
+            </div>
+            <div class="col-sm-1 remove-icon" ng-click="removeOS()"><span class="glyphicon glyphicon-minus"></span>Remove</div>
           </div>
         </div>
       </div>
+        <div class="btn-group pull-right" dropdown>
+          <button class="btn dropdown-toggle">
+            <span class="glyphicon glyphicon-plus"></span> Add OS &nbsp;<span class="caret"></span></button>
+          <ul class="dropdown-menu">
+            <li ng-repeat="os in osList"><a ng-if="os.selected==false" ng-click="addOS()">{{os.OperatingSystems.os_type}}</a></li>
+          </ul>
+        </div>
       <div class="clearfix">
         <div class="col-sm-12" id="skip-validation">
           <div class="checkbox">
@@ -139,7 +151,7 @@
   </div>
   <div class="col-sm-12">
     <button class="btn btn-primary pull-right left-margin" ng-click="save()"
-            ng-disabled="(createController && repoRegForm.version.$invalid) || selectedOS === 0">{{'common.controls.save' | translate}}</button>
+            ng-disabled="isSaveButtonDisabled()">{{'common.controls.save' | translate}}</button>
     <button class="btn btn-default pull-right" ng-click="cancel()">{{'common.controls.cancel' | translate}}</button>
   </div>
 </form>


[28/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/718f2ea1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/718f2ea1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/718f2ea1

Branch: refs/heads/trunk
Commit: 718f2ea1832eb10f666fd0519c0cf0c7005b6d4a
Parents: 1e89d1d 0ce5fea
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Tue Feb 16 13:29:05 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Tue Feb 16 13:29:05 2016 -0500

----------------------------------------------------------------------
 .../main/resources/ui/admin-web/app/index.html  |    1 +
 .../loginActivities/HomeDirectoryCtrl.js        |   31 +
 .../stackVersions/StackVersionsCreateCtrl.js    |    1 +
 .../stackVersions/StackVersionsEditCtrl.js      |    1 +
 .../ui/admin-web/app/scripts/i18n.config.js     |   13 +-
 .../resources/ui/admin-web/app/styles/main.css  |    3 +
 .../views/loginActivities/homeDirectory.html    |   63 +-
 .../app/views/loginActivities/loginMessage.html |    2 +-
 .../views/stackVersions/stackVersionPage.html   |    8 +
 .../src/main/python/ambari_agent/ActionQueue.py |    3 +-
 .../python/ambari_agent/alerts/port_alert.py    |    2 +-
 .../test/python/ambari_agent/TestActionQueue.py |   59 +
 .../timeline/AbstractTimelineMetricsSink.java   |   92 +-
 .../src/main/conf/flume-metrics2.properties.j2  |    3 +-
 .../sink/flume/FlumeTimelineMetricsSink.java    |   11 +-
 .../conf/hadoop-metrics2-hbase.properties.j2    |    8 +-
 .../src/main/conf/hadoop-metrics2.properties.j2 |   22 +-
 .../timeline/HadoopTimelineMetricsSink.java     |   13 +-
 .../timeline/HadoopTimelineMetricsSinkTest.java |    6 +-
 .../conf/unix/metric_monitor.ini                |    1 +
 .../src/main/python/core/config_reader.py       |    6 +-
 .../src/main/python/core/emitter.py             |   18 +-
 .../kafka/KafkaTimelineMetricsReporter.java     |   17 +-
 .../kafka/KafkaTimelineMetricsReporterTest.java |    2 +-
 .../storm/StormTimelineMetricsReporter.java     |   24 +-
 .../sink/storm/StormTimelineMetricsSink.java    |    8 +-
 .../conf/unix/ambari-metrics-collector          |    2 +-
 .../ApplicationHistoryServer.java               |   11 +-
 .../loadsimulator/net/RestMetricsSender.java    |    6 +-
 .../timeline/TimelineMetricConfiguration.java   |    6 +-
 ambari-server/etc/init/ambari-server.conf       |   33 +
 ambari-server/pom.xml                           |   12 +-
 ambari-server/sbin/ambari-server                |    6 +-
 ambari-server/src/main/assemblies/server.xml    |    4 +
 .../server/checks/CheckDatabaseHelper.java      |  473 ++++
 .../ComponentSSLConfiguration.java              |   14 +-
 .../server/configuration/Configuration.java     |    6 +-
 .../AmbariManagementControllerImpl.java         |    6 +-
 .../BlueprintConfigurationProcessor.java        |    4 +-
 .../internal/HostResourceProvider.java          |   12 +-
 .../internal/PermissionResourceProvider.java    |    3 +
 .../internal/ScaleClusterRequest.java           |   28 +-
 .../ganglia/GangliaPropertyProvider.java        |    2 +-
 .../ganglia/GangliaReportPropertyProvider.java  |    2 +-
 .../metrics/timeline/AMSPropertyProvider.java   |    8 +-
 .../timeline/AMSReportPropertyProvider.java     |    2 +-
 .../server/orm/entities/PermissionEntity.java   |   35 +-
 .../security/ldap/AmbariLdapDataPopulator.java  |   11 +-
 .../server/security/ldap/LdapBatchDto.java      |    5 +
 .../upgrades/RangerConfigCalculation.java       |   14 +-
 .../server/state/cluster/ClusterImpl.java       |   12 +-
 .../state/stack/upgrade/ClusterGrouping.java    |    7 +
 .../state/stack/upgrade/ExecuteHostType.java    |    7 +
 .../state/stack/upgrade/TaskWrapperBuilder.java |   15 +-
 .../server/topology/ClusterTopologyImpl.java    |   51 +-
 .../server/upgrade/UpgradeCatalog230.java       |    1 +
 .../server/upgrade/UpgradeCatalog240.java       |   32 +-
 ambari-server/src/main/python/ambari-server.py  |    7 +-
 .../main/python/ambari_server/checkDatabase.py  |   80 +
 .../main/python/ambari_server/setupActions.py   |    1 +
 .../src/main/python/ambari_server/utils.py      |    4 +-
 .../src/main/python/ambari_server_main.py       |   19 +-
 .../main/resources/Ambari-DDL-Derby-CREATE.sql  |   17 +-
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   17 +-
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |   19 +-
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |   17 +-
 .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql     |   17 +-
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |   17 +-
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   |   17 +-
 .../ACCUMULO/1.6.1.2.2.0/metainfo.xml           |    1 +
 .../1.6.1.2.2.0/package/scripts/params.py       |    9 +-
 .../hadoop-metrics2-accumulo.properties.j2      |    7 +-
 .../0.1.0/configuration/ams-hbase-env.xml       |    6 +-
 .../0.1.0/configuration/ams-site.xml            |   14 +-
 .../0.1.0/configuration/ams-ssl-client.xml      |   37 +
 .../0.1.0/configuration/ams-ssl-server.xml      |   64 +
 .../AMBARI_METRICS/0.1.0/metainfo.xml           |    9 +
 .../AMBARI_METRICS/0.1.0/package/scripts/ams.py |    8 +
 .../package/scripts/metrics_grafana_util.py     |    2 +-
 .../0.1.0/package/scripts/params.py             |   12 +-
 .../0.1.0/package/scripts/service_check.py      |   17 +-
 .../hadoop-metrics2-hbase.properties.j2         |    7 +-
 .../package/templates/metric_monitor.ini.j2     |    1 +
 .../metrics_grafana_datasource.json.j2          |    4 +-
 .../0.1.0/quickLinks/quicklinks.json            |   34 +
 .../FLUME/1.4.0.2.0/metainfo.xml                |    1 +
 .../FLUME/1.4.0.2.0/package/scripts/params.py   |    9 +-
 .../templates/flume-metrics2.properties.j2      |    8 +-
 .../common-services/HAWQ/2.0.0/metainfo.xml     |    1 +
 .../HBASE/0.96.0.2.0/metainfo.xml               |    1 +
 .../0.96.0.2.0/package/scripts/params_linux.py  |   10 +-
 ...-metrics2-hbase.properties-GANGLIA-MASTER.j2 |    7 +-
 ...doop-metrics2-hbase.properties-GANGLIA-RS.j2 |    7 +-
 .../common-services/HDFS/2.1.0.2.0/alerts.json  |   16 +-
 .../common-services/HDFS/2.1.0.2.0/metainfo.xml |    1 +
 .../HIVE/0.12.0.2.0/metainfo.xml                |    1 +
 .../package/scripts/hive_server_interactive.py  |   93 +
 .../0.8.1.2.2/configuration/kafka-broker.xml    |   21 +
 .../KAFKA/0.8.1.2.2/metainfo.xml                |    1 +
 .../KAFKA/0.8.1.2.2/package/scripts/kafka.py    |    4 +
 .../KAFKA/0.8.1.2.2/package/scripts/params.py   |   10 +-
 .../OOZIE/4.0.0.2.0/package/scripts/oozie.py    |   11 +-
 .../4.0.0.2.0/package/scripts/oozie_server.py   |   36 +-
 .../package/scripts/oozie_server_upgrade.py     |    4 +-
 .../4.0.0.2.0/package/scripts/params_linux.py   |    3 +
 .../STORM/0.9.1.2.1/metainfo.xml                |    1 +
 .../0.9.1.2.1/package/scripts/params_linux.py   |   13 +-
 .../0.9.1.2.1/package/templates/config.yaml.j2  |    8 +-
 .../templates/storm-metrics2.properties.j2      |   10 +-
 .../common-services/YARN/2.1.0.2.0/metainfo.xml |    2 +
 .../main/resources/scripts/Ambaripreupload.py   |   59 +-
 .../scripts/shared_initialization.py            |    1 +
 .../2.0.6/hooks/before-START/scripts/params.py  |   10 +-
 .../scripts/shared_initialization.py            |    1 +
 .../templates/hadoop-metrics2.properties.j2     |   25 +-
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |    2 +
 .../HDP/2.1/upgrades/nonrolling-upgrade-2.3.xml |    3 +-
 .../HDP/2.2/upgrades/nonrolling-upgrade-2.2.xml |    5 +-
 .../HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml |    5 +-
 .../HDP/2.2/upgrades/nonrolling-upgrade-2.4.xml |    5 +-
 .../stacks/HDP/2.2/upgrades/upgrade-2.2.xml     |    5 +-
 .../stacks/HDP/2.2/upgrades/upgrade-2.3.xml     |    5 +-
 .../stacks/HDP/2.2/upgrades/upgrade-2.4.xml     |    5 +-
 .../stacks/HDP/2.3/role_command_order.json      |    3 +-
 .../stacks/HDP/2.3/services/KAFKA/alerts.json   |   32 +
 .../services/RANGER/themes/theme_version_2.json |   20 +-
 .../services/YARN/configuration/yarn-env.xml    |    6 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml |    5 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml |    5 +-
 .../stacks/HDP/2.3/upgrades/upgrade-2.3.xml     |    5 +-
 .../stacks/HDP/2.3/upgrades/upgrade-2.4.xml     |    5 +-
 .../configuration/hive-interactive-site.xml     | 2053 ++++++++++++++++++
 .../stacks/HDP/2.4/services/HIVE/metainfo.xml   |   49 +
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml |    5 +-
 .../stacks/HDP/2.4/upgrades/upgrade-2.4.xml     |    5 +-
 .../src/main/resources/stacks/stack_advisor.py  |    3 +-
 .../server/checks/CheckDatabaseHelperTest.java  |  295 +++
 .../ComponentSSLConfigurationTest.java          |    6 +-
 .../BlueprintConfigurationProcessorTest.java    |   67 +-
 .../PermissionResourceProviderTest.java         |    2 +
 .../ganglia/GangliaPropertyProviderTest.java    |   26 +-
 .../GangliaReportPropertyProviderTest.java      |    2 +-
 .../timeline/AMSPropertyProviderTest.java       |   30 +-
 .../timeline/AMSReportPropertyProviderTest.java |    4 +-
 .../ldap/AmbariLdapDataPopulatorTest.java       |   57 +
 .../upgrades/RangerConfigCalculationTest.java   |   27 +
 .../server/stack/KerberosDescriptorTest.java    |   10 +-
 .../ambari/server/stack/StackManagerTest.java   |    4 +
 .../server/upgrade/UpgradeCatalog240Test.java   |   57 +-
 .../src/test/python/TestAmbariServer.py         |   36 +
 .../AMBARI_METRICS/test_metrics_collector.py    |    8 +
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     |   33 +-
 .../stacks/2.0.6/common/test_stack_advisor.py   |   59 +
 .../python/stacks/2.0.6/configs/default.json    | 1146 +++++-----
 .../2.0.6/configs/default_ams_embedded.json     |    7 +
 .../hooks/before-START/test_before_start.py     |    4 +
 .../stacks/2.2/common/test_stack_advisor.py     |    3 +-
 ambari-server/src/test/resources/os_family.json |   89 +-
 ambari-web/app/assets/test/tests.js             |    4 +
 ambari-web/app/config.js                        |    3 +-
 .../global/background_operations_controller.js  |   15 +-
 .../main/admin/kerberos/step7_controller.js     |   12 +-
 ambari-web/app/controllers/main/host.js         |    2 +-
 .../controllers/main/service/info/configs.js    |    4 +-
 ambari-web/app/controllers/main/service/item.js |   11 +-
 ambari-web/app/controllers/wizard.js            |    4 +-
 .../app/controllers/wizard/step1_controller.js  |    2 +
 .../app/controllers/wizard/step7_controller.js  |   35 +-
 .../app/controllers/wizard/step8_controller.js  |   30 +-
 .../app/mappers/service_metrics_mapper.js       |    3 +-
 ambari-web/app/messages.js                      |    3 +
 .../app/mixins/common/widgets/widget_mixin.js   |   29 +-
 .../mixins/wizard/assign_master_components.js   |   37 +-
 ambari-web/app/models/host_component.js         |    1 +
 ambari-web/app/models/quick_links.js            |   12 +
 ambari-web/app/routes/add_service_routes.js     |    1 +
 ambari-web/app/styles/alerts.less               |   25 +-
 ambari-web/app/styles/application.less          |   12 +-
 .../templates/common/host_progress_popup.hbs    |    5 +
 .../admin/stack_upgrade/edit_repositories.hbs   |    7 +
 .../main/alerts/instance_service_host.hbs       |   30 +-
 ambari-web/app/templates/wizard/step1.hbs       |    7 +
 ambari-web/app/utils/ajax/ajax.js               |   15 +
 .../utils/configs/rm_ha_config_initializer.js   |    2 +-
 ambari-web/app/utils/host_progress_popup.js     |   10 +-
 ambari-web/app/views/application.js             |   22 +-
 .../configs/widgets/config_widget_view.js       |    3 -
 .../common/host_progress_popup_body_view.js     |   75 +-
 .../app/views/common/log_file_search_view.js    |    2 +-
 ambari-web/app/views/common/modal_popup.js      |   27 +
 .../modal_popups/log_file_search_popup.js       |   12 +-
 .../app/views/common/quick_view_link_view.js    |    3 +
 .../views/common/widget/graph_widget_view.js    |   18 +-
 .../admin/highAvailability/progress_view.js     |    4 +-
 .../stack_upgrade/upgrade_version_box_view.js   |    8 +-
 .../main/alerts/definition_details_view.js      |    8 +
 ambari-web/app/views/main/host/add_view.js      |    3 +
 .../app/views/main/host/configs_service.js      |    3 +
 ambari-web/app/views/main/host/menu.js          |    6 +-
 ambari-web/app/views/main/host/summary.js       |    9 +-
 .../global/background_operations_test.js        |   16 +
 ambari-web/test/controllers/wizard_test.js      |    5 +-
 .../test/mixins/common/widget_mixin_test.js     |    2 +-
 .../host_progress_popup_body_view_test.js       |   54 +-
 .../views/common/log_file_search_view_test.js   |    2 +-
 .../test/views/common/quick_link_view_test.js   |    5 +
 .../highAvailability/progress_view_test.js      |    4 +-
 .../test/views/main/host/add_view_test.js       |  141 ++
 .../views/main/host/combo_search_box_test.js    |   42 +
 .../views/main/host/config_service_menu_test.js |  140 ++
 .../test/views/main/host/config_service_test.js |   46 +
 .../views/main/host/host_alerts_view_test.js    |  140 +-
 ambari-web/test/views/main/host/menu_test.js    |   43 +-
 ambari-web/test/views/main/host/summary_test.js |  277 ++-
 pom.xml                                         |   17 +
 215 files changed, 6513 insertions(+), 1235 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
----------------------------------------------------------------------
diff --cc ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
index 931b7ec,002d393..190670a
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
@@@ -23,6 -23,9 +23,7 @@@ angular.module('ambariAdminConsole'
    $scope.createController = true;
    $scope.osList = [];
    $scope.skipValidation = false;
+   $scope.useRedhatSatellite = false;
 -  $scope.selectedOS = 0;
 -  $scope.repoSubversion = "";
  
    $scope.clusterName = $routeParams.clusterName;
    $scope.subversionPattern = /^\d+\.\d+(-\d+)?$/;

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js
----------------------------------------------------------------------
diff --cc ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js
index cd9cf40,3c38444..b86515f
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js
@@@ -23,12 -23,8 +23,13 @@@ angular.module('ambariAdminConsole'
    $scope.editController = true;
    $scope.osList = [];
    $scope.skipValidation = false;
+   $scope.useRedhatSatellite = false;
    $scope.selectedOS = 0;
 +  $scope.upgradeStack = {
 +    stack_name: '',
 +    stack_version: '',
 +    display_name: ''
 +  };
  
    $scope.loadStackVersionInfo = function () {
      return Stack.getRepo($routeParams.versionId, $routeParams.stackName).then(function (response) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
----------------------------------------------------------------------
diff --cc ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
index aa0b830,0c67831..4caf85f
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
@@@ -308,27 -311,19 +315,29 @@@ angular.module('ambariAdminConsole'
        'os': 'OS',
        'baseURL': 'Base URL',
        'skipValidation': 'Skip Repository Base URL validation (Advanced)',
 +      'noVersions': 'Select version to display details.',
 +      'contents': {
 +        'title': 'Contents',
 +        'empty': 'No contents to display'
 +      },
 +      'details': {
 +        'stackName': 'Stack Name',
 +        'displayName': 'Display Name',
 +        'version': 'Version',
 +        'actualVersion': 'Actual Version',
 +        'releaseNotes': 'Release Notes'
 +      },
+       'useRedhatSatellite': 'Use RedHat Satellite/Spacewalk',
 -
 -
        'changeBaseURLConfirmation': {
          'title': 'Confirm Base URL Change',
          'message': 'You are about to change repository Base URLs that are already in use. Please confirm that you intend to make this change and that the new Base URLs point to the same exact Stack version and build'
        },
  
        'alerts': {
 -        'baseURLs': 'Provide Base URLs for the Operating Systems you are configuring. Uncheck all other Operating Systems.',
 +        'baseURLs': 'Provide Base URLs for the Operating Systems you are configuring.',
          'validationFailed': 'Some of the repositories failed validation. Make changes to the base url or skip validation if you are sure that urls are correct',
          'skipValidationWarning': '<b>Warning:</b> This is for advanced users only. Use this option if you want to skip validation for Repository Base URLs.',
+         'useRedhatSatelliteWarning': 'Disable distributed repositories and use RedHat Satellite/Spacewalk channels instead',
          'filterListError': 'Fetch stack version filter list error',
          'versionCreated': 'Created version <a href="#/stackVersions/{{stackName}}/{{versionName}}/edit">{{stackName}}-{{versionName}}</a>',
          'versionCreationError': 'Version creation error',

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.2.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.3.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.4.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-web/app/messages.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-web/app/utils/host_progress_popup.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/718f2ea1/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
----------------------------------------------------------------------


[35/50] [abbrv] ambari git commit: Merge with trunk

Posted by nc...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
index 3cc7516..105d695 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
@@ -202,8 +202,10 @@ CREATE TABLE ambari.servicecomponentdesiredstate (
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
+  recovery_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
+  PRIMARY KEY (component_name, cluster_id, service_name)
 );
 GRANT ALL PRIVILEGES ON TABLE ambari.servicecomponentdesiredstate TO :username;
 
@@ -758,6 +760,7 @@ GRANT ALL PRIVILEGES ON TABLE ambari.setting TO :username;
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON ambari.stage (request_id);
 CREATE INDEX idx_hrc_request_id ON ambari.host_role_command (request_id);
+CREATE INDEX idx_hrc_status_role ON ambari.host_role_command (status, role);
 CREATE INDEX idx_rsc_request_id ON ambari.role_success_criteria (request_id);
 
 --------altering tables by creating unique constraints----------

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
index a5bfdc2..a897454 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
@@ -167,8 +167,10 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id NUMERIC(19) NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
+  recovery_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
+  PRIMARY KEY (component_name, cluster_id, service_name)
 );
 
 CREATE TABLE servicedesiredstate (
@@ -673,6 +675,7 @@ CREATE TABLE setting (
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
+CREATE INDEX idx_hrc_status_role ON host_role_command (status, role);
 CREATE INDEX idx_rsc_request_id ON role_success_criteria (request_id);
 
 -- altering tables by creating unique constraints----------

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
index 8e5b2f8..a8bda7c 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
@@ -187,10 +187,12 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
+  recovery_enabled SMALLINT NOT NULL DEFAULT 0,
   PRIMARY KEY CLUSTERED (id),
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
-);
+  PRIMARY KEY CLUSTERED (component_name, cluster_id, service_name)
+  );
 
 CREATE TABLE servicedesiredstate (
   cluster_id BIGINT NOT NULL,
@@ -782,6 +784,7 @@ CREATE TABLE setting (
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
+CREATE INDEX idx_hrc_status_role ON host_role_command (status, role);
 CREATE INDEX idx_rsc_request_id ON role_success_criteria (request_id);
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/resources/alert-templates.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/alert-templates.xml b/ambari-server/src/main/resources/alert-templates.xml
index d7fe320..2e8fc71 100644
--- a/ambari-server/src/main/resources/alert-templates.xml
+++ b/ambari-server/src/main/resources/alert-templates.xml
@@ -158,6 +158,20 @@
                   <div class="label-small">
                     $alert.getAlertText()
                   </div>
+                  <div class="label-small">
+                    Cluster: $alert.getAlertDefinition().getCluster().getClusterName()
+                  </div>
+                  #if( $alert.getHostName() )
+                    #if( $ambari.hasUrl() )
+                      <div class="label-small">
+                      Host: <a href=$ambari.getUrl()/#/main/hosts/$alert.getHostName()/summary>$ambari.getUrl()/#/main/hosts/$alert.getHostName()/summary</a>
+                      </div>
+                    #else
+                      <div class="label-small">
+                        Host: $alert.getHostName()
+                      </div>
+                    #end
+                  #end
                 </td>
               </tr>
             #end
@@ -170,6 +184,10 @@
     This notification was sent to $dispatch.getTargetName()
     <br/>
     Apache Ambari $ambari.getServerVersion()
+    #if( $ambari.hasUrl() )
+    <br/>
+    Ambari Server link: <a href=$ambari.getUrl()>$ambari.getUrl()</a>
+    #end
   </div>
 </html>
       ]]>
@@ -193,4 +211,4 @@
 $alert.getAlertText()]]>
     </body>
   </alert-template>  
-</alert-templates>
\ No newline at end of file
+</alert-templates>
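
The alert-templates.xml change above adds a per-alert cluster name, an optional link to the host's summary page when the Ambari server URL is available, and a footer link to the server itself. As a rough Python analogue of that branching only (the ambari_url and host_name values below are made up, and the function is not part of the template or of Ambari):

def render_host_line(ambari_url, host_name):
    """Mirror the template's #if blocks: link to the host summary page when a
    server URL is configured, fall back to the bare host name, and emit
    nothing for alerts that are not tied to a host."""
    if not host_name:
        return ""
    if ambari_url:
        link = "{0}/#/main/hosts/{1}/summary".format(ambari_url, host_name)
        return 'Host: <a href="{0}">{0}</a>'.format(link)
    return "Host: {0}".format(host_name)

# Hypothetical values, one per branch:
print(render_host_line("http://ambari.example.com:8080", "c6401.ambari.apache.org"))
print(render_host_line(None, "c6401.ambari.apache.org"))
print(render_host_line("http://ambari.example.com:8080", None))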

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/configuration/kafka-broker.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/configuration/kafka-broker.xml b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/configuration/kafka-broker.xml
index 704d73f..857f40e 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/configuration/kafka-broker.xml
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/configuration/kafka-broker.xml
@@ -325,7 +325,6 @@
     <description>Timeline port</description>
   </property>
   <property>
-  <property>
     <name>kafka.timeline.metrics.protocol</name>
     <value>{{metric_collector_protocol}}</value>
     <description>Timeline protocol(http or https)</description>

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/properties.json b/ambari-server/src/main/resources/properties.json
index 4052ad2..8ccae05 100644
--- a/ambari-server/src/main/resources/properties.json
+++ b/ambari-server/src/main/resources/properties.json
@@ -62,6 +62,7 @@
         "ServiceComponentInfo/total_count",
         "ServiceComponentInfo/started_count",
         "ServiceComponentInfo/installed_count",
+        "ServiceComponentInfo/recovery_enabled",
         "params/run_smoke_test",
         "_"
     ],

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/resources/scripts/Ambaripreupload.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/scripts/Ambaripreupload.py b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
index cc6213e..fb0c3e1 100644
--- a/ambari-server/src/main/resources/scripts/Ambaripreupload.py
+++ b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
@@ -132,6 +132,7 @@ with Environment() as env:
   TAR_DESTINATION_FOLDER_SUFFIX = "_tar_destination_folder"
   
   class params:
+    hdfs_path_prefix = hdfs_path_prefix
     hdfs_user = "hdfs"
     mapred_user ="mapred"
     hadoop_bin_dir="/usr/hdp/" + hdp_version + "/hadoop/bin"
@@ -236,23 +237,23 @@ with Environment() as env:
     return _copy_files(source_and_dest_pairs, file_owner, group_owner, kinit_if_needed)
   
   def createHdfsResources():
-    params.HdfsResource('/atshistory', user='hdfs', change_permissions_for_parents=True, owner='yarn', group='hadoop', type='directory', action= ['create_on_execute'], mode=0755)
-    params.HdfsResource('/user/hcat', owner='hcat', type='directory', action=['create_on_execute'], mode=0755)
-    params.HdfsResource('/hive/warehouse', owner='hive', type='directory', action=['create_on_execute'], mode=0777)
-    params.HdfsResource('/user/hive', owner='hive', type='directory', action=['create_on_execute'], mode=0755)
-    params.HdfsResource('/tmp', mode=0777, action=['create_on_execute'], type='directory', owner='hdfs')
-    params.HdfsResource('/user/ambari-qa', type='directory', action=['create_on_execute'], mode=0770)
-    params.HdfsResource('/user/oozie', owner='oozie', type='directory', action=['create_on_execute'], mode=0775)
-    params.HdfsResource('/app-logs', recursive_chmod=True, owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=0777)
-    params.HdfsResource('/tmp/entity-file-history/active', owner='yarn', group='hadoop', type='directory', action=['create_on_execute'])
-    params.HdfsResource('/mapred', owner='mapred', type='directory', action=['create_on_execute'])
-    params.HdfsResource('/mapred/system', owner='hdfs', type='directory', action=['create_on_execute'])
-    params.HdfsResource('/mr-history/done', change_permissions_for_parents=True, owner='mapred', group='hadoop', type='directory', action=['create_on_execute'], mode=0777)
-    params.HdfsResource('/atshistory/done', owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=0700)
-    params.HdfsResource('/atshistory/active', owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=01777)
-    params.HdfsResource('/ams/hbase', owner='ams', type='directory', action=['create_on_execute'], mode=0775)
-    params.HdfsResource('/amshbase/staging', owner='ams', type='directory', action=['create_on_execute'], mode=0711)
-    params.HdfsResource('/user/ams/hbase', owner='ams', type='directory', action=['create_on_execute'], mode=0775)
+    params.HdfsResource(format('{hdfs_path_prefix}/atshistory'), user='hdfs', change_permissions_for_parents=True, owner='yarn', group='hadoop', type='directory', action= ['create_on_execute'], mode=0755)
+    params.HdfsResource(format('{hdfs_path_prefix}/user/hcat'), owner='hcat', type='directory', action=['create_on_execute'], mode=0755)
+    params.HdfsResource(format('{hdfs_path_prefix}/hive/warehouse'), owner='hive', type='directory', action=['create_on_execute'], mode=0777)
+    params.HdfsResource(format('{hdfs_path_prefix}/user/hive'), owner='hive', type='directory', action=['create_on_execute'], mode=0755)
+    params.HdfsResource(format('{hdfs_path_prefix}/tmp'), mode=0777, action=['create_on_execute'], type='directory', owner='hdfs')
+    params.HdfsResource(format('{hdfs_path_prefix}/user/ambari-qa'), type='directory', action=['create_on_execute'], mode=0770)
+    params.HdfsResource(format('{hdfs_path_prefix}/user/oozie'), owner='oozie', type='directory', action=['create_on_execute'], mode=0775)
+    params.HdfsResource(format('{hdfs_path_prefix}/app-logs'), recursive_chmod=True, owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=0777)
+    params.HdfsResource(format('{hdfs_path_prefix}/tmp/entity-file-history/active'), owner='yarn', group='hadoop', type='directory', action=['create_on_execute'])
+    params.HdfsResource(format('{hdfs_path_prefix}/mapred'), owner='mapred', type='directory', action=['create_on_execute'])
+    params.HdfsResource(format('{hdfs_path_prefix}/mapred/system'), owner='hdfs', type='directory', action=['create_on_execute'])
+    params.HdfsResource(format('{hdfs_path_prefix}/mr-history/done'), change_permissions_for_parents=True, owner='mapred', group='hadoop', type='directory', action=['create_on_execute'], mode=0777)
+    params.HdfsResource(format('{hdfs_path_prefix}/atshistory/done'), owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=0700)
+    params.HdfsResource(format('{hdfs_path_prefix}/atshistory/active'), owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=01777)
+    params.HdfsResource(format('{hdfs_path_prefix}/ams/hbase'), owner='ams', type='directory', action=['create_on_execute'], mode=0775)
+    params.HdfsResource(format('{hdfs_path_prefix}/amshbase/staging'), owner='ams', type='directory', action=['create_on_execute'], mode=0711)
+    params.HdfsResource(format('{hdfs_path_prefix}/user/ams/hbase'), owner='ams', type='directory', action=['create_on_execute'], mode=0775)
 
 
   def putCreatedHdfsResourcesToIgnore(env):
@@ -262,14 +263,16 @@ with Environment() as env:
     
     file_content = ""
     for file in env.config['hdfs_files']:
-      file_content += file['target']
+      if not file['target'].startswith(hdfs_path_prefix):
+        raise Exception("Something created outside hdfs_path_prefix!")
+      file_content += file['target'][len(hdfs_path_prefix):]
       file_content += "\n"
       
     with open("/var/lib/ambari-agent/data/.hdfs_resource_ignore", "a+") as fp:
       fp.write(file_content)
       
   def putSQLDriverToOozieShared():
-    params.HdfsResource('/user/oozie/share/lib/sqoop/{0}'.format(os.path.basename(SQL_DRIVER_PATH)),
+    params.HdfsResource(hdfs_path_prefix + '/user/oozie/share/lib/sqoop/{0}'.format(os.path.basename(SQL_DRIVER_PATH)),
                         owner='hdfs', type='file', action=['create_on_execute'], mode=0644, source=SQL_DRIVER_PATH)
       
   env.set_params(params)
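
The Ambaripreupload.py hunk above rewrites every HdfsResource path so it is created under hdfs_path_prefix, and then strips that prefix again (refusing anything created outside it) before writing .hdfs_resource_ignore. A minimal sketch of that prefix-and-strip round trip, independent of the resource_management library (the helper names and the /apps/prep prefix are illustrative, not taken from the script):

def with_prefix(prefix, path):
    # Counterpart of format('{hdfs_path_prefix}/user/oozie') in the script above.
    return prefix.rstrip("/") + path

def strip_prefix(prefix, targets):
    """Refuse anything created outside the prefix and return the relative
    paths that belong in /var/lib/ambari-agent/data/.hdfs_resource_ignore."""
    stripped = []
    for target in targets:
        if not target.startswith(prefix):
            raise Exception("Something created outside hdfs_path_prefix!")
        stripped.append(target[len(prefix):])
    return stripped

# With a hypothetical prefix of /apps/prep:
print(strip_prefix("/apps/prep", [with_prefix("/apps/prep", "/user/oozie"),
                                  with_prefix("/apps/prep", "/tmp")]))
# -> ['/user/oozie', '/tmp']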

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index 7c69ac9..f6f8cde 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -581,8 +581,8 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
         putAmsHbaseSiteProperty("hbase.regionserver.global.memstore.upperLimit", 0.3)
         putAmsHbaseSiteProperty("hbase.regionserver.global.memstore.lowerLimit", 0.25)
         putAmsHbaseSiteProperty("phoenix.query.maxGlobalMemoryPercentage", 20)
-        putAmsSiteProperty("phoenix.query.maxGlobalMemoryPercentage", 30)
         putAmsHbaseSiteProperty("phoenix.coprocessor.maxMetaDataCacheSize", 81920000)
+        putAmsSiteProperty("phoenix.query.maxGlobalMemoryPercentage", 30)
       elif total_sinks_count >= 500:
         putAmsHbaseSiteProperty("hbase.regionserver.handler.count", 60)
         putAmsHbaseSiteProperty("hbase.regionserver.hlog.blocksize", 134217728)
@@ -593,6 +593,9 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
         putAmsHbaseSiteProperty("phoenix.coprocessor.maxMetaDataCacheSize", 20480000)
       pass
 
+    metrics_api_handlers = min(50, max(20, int(total_sinks_count / 100)))
+    putAmsSiteProperty("timeline.metrics.service.handler.thread.count", metrics_api_handlers)
+
     # Distributed mode heap size
     if operatingMode == "distributed":
       hbase_heapsize = max(hbase_heapsize, 756)
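
The stack_advisor change above reorders two AMS HBase recommendations and derives timeline.metrics.service.handler.thread.count from the number of metric sinks, clamped to the 20-50 range. The clamp in isolation, with sample sink counts that are not taken from any real cluster:

def metrics_api_handlers(total_sinks_count):
    # Same expression as in the advisor: roughly one handler per 100 sinks,
    # never fewer than 20 and never more than 50.
    return min(50, max(20, int(total_sinks_count / 100)))

assert metrics_api_handlers(150) == 20    # small clusters stay at the floor
assert metrics_api_handlers(3500) == 35   # mid-size clusters scale linearly
assert metrics_api_handlers(9000) == 50   # large clusters hit the cap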

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
index dc968cc..cd25d77 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
@@ -367,13 +367,16 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
 
     yarn_queues = "default"
     capacitySchedulerProperties = {}
-    if "capacity-scheduler" in services['configurations'] and "capacity-scheduler" in services['configurations']["capacity-scheduler"]["properties"]:
-      properties = str(services['configurations']["capacity-scheduler"]["properties"]["capacity-scheduler"]).split('\n')
-      for property in properties:
-        key,sep,value = property.partition("=")
-        capacitySchedulerProperties[key] = value
-    if "yarn.scheduler.capacity.root.queues" in capacitySchedulerProperties:
-      yarn_queues = str(capacitySchedulerProperties["yarn.scheduler.capacity.root.queues"])
+    if "capacity-scheduler" in services['configurations']:
+      if "capacity-scheduler" in services['configurations']["capacity-scheduler"]["properties"]:
+        properties = str(services['configurations']["capacity-scheduler"]["properties"]["capacity-scheduler"]).split('\n')
+        for property in properties:
+          key,sep,value = property.partition("=")
+          capacitySchedulerProperties[key] = value
+      if "yarn.scheduler.capacity.root.queues" in capacitySchedulerProperties:
+        yarn_queues = str(capacitySchedulerProperties["yarn.scheduler.capacity.root.queues"])
+      elif "yarn.scheduler.capacity.root.queues" in services['configurations']["capacity-scheduler"]["properties"]:
+        yarn_queues =  services['configurations']["capacity-scheduler"]["properties"]["yarn.scheduler.capacity.root.queues"]
     # Interactive Queues property attributes
     putHiveServerPropertyAttribute = self.putPropertyAttribute(configurations, "hiveserver2-site")
     toProcessQueues = yarn_queues.split(",")
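
The HDP 2.2 advisor change above guards the capacity-scheduler lookup so the queue list can come either from the flattened capacity-scheduler blob or from a standalone yarn.scheduler.capacity.root.queues property. The parsing step itself is plain key=value splitting on newlines; a stripped-down sketch with a made-up properties blob behaves like this:

def parse_capacity_scheduler(blob):
    # Mirrors the str(...).split('\n') / partition('=') loop in the advisor.
    props = {}
    for line in blob.split('\n'):
        key, _sep, value = line.partition('=')
        props[key] = value
    return props

sample = ("yarn.scheduler.capacity.root.queues=default,llap\n"
          "yarn.scheduler.capacity.root.default.capacity=80")
parsed = parse_capacity_scheduler(sample)
queues = parsed.get("yarn.scheduler.capacity.root.queues", "default").split(",")
# queues == ['default', 'llap']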

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
index b354378..34e4cfa 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
@@ -943,31 +943,31 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
                                 "HAWQ Master or Standby Master cannot use the port 5432 when installed on the same host as the Ambari Server. Ambari Postgres DB uses the same port. Please choose a different value (e.g. 10432)")})
 
     # 2. Check if any data directories are pointing to root dir '/'
-    prop_name = 'hawq_master_directory'
-    display_name = 'HAWQ Master directory'
-    self.validateIfRootDir (properties, validationItems, prop_name, display_name)
-
-    prop_name = 'hawq_master_temp_directory'
-    display_name = 'HAWQ Master temp directory'
-    self.validateIfRootDir (properties, validationItems, prop_name, display_name)
-
-    prop_name = 'hawq_segment_directory'
-    display_name = 'HAWQ Segment directory'
-    self.validateIfRootDir (properties, validationItems, prop_name, display_name)
-
-    prop_name = 'hawq_segment_temp_directory'
-    display_name = 'HAWQ Segment temp directory'
-    self.validateIfRootDir (properties, validationItems, prop_name, display_name)
+    directories = {
+                    'hawq_master_directory': 'HAWQ Master directory',
+                    'hawq_master_temp_directory': 'HAWQ Master temp directory',
+                    'hawq_segment_directory': 'HAWQ Segment directory',
+                    'hawq_segment_temp_directory': 'HAWQ Segment temp directory'
+                  }
+    for property_name, display_name in directories.iteritems():
+      self.validateIfRootDir(properties, validationItems, property_name, display_name)
 
     # 3. Check YARN RM address properties
+    YARN = "YARN"
     servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
-    if "YARN" in servicesList and "yarn-site" in configurations:
+    if YARN in servicesList and "yarn-site" in configurations:
       yarn_site = getSiteProperties(configurations, "yarn-site")
       for hs_prop, ys_prop in self.getHAWQYARNPropertyMapping().items():
         if hs_prop in hawq_site and ys_prop in yarn_site and hawq_site[hs_prop] != yarn_site[ys_prop]:
           message = "Expected value: {0} (this property should have the same value as the property {1} in yarn-site)".format(yarn_site[ys_prop], ys_prop)
           validationItems.append({"config-name": hs_prop, "item": self.getWarnItem(message)})
 
+    # 4. Check HAWQ Resource Manager type
+    HAWQ_GLOBAL_RM_TYPE = "hawq_global_rm_type"
+    if YARN not in servicesList and HAWQ_GLOBAL_RM_TYPE in hawq_site and hawq_site[HAWQ_GLOBAL_RM_TYPE].upper() == YARN:
+      message = "{0} must be set to none if YARN service is not installed".format(HAWQ_GLOBAL_RM_TYPE)
+      validationItems.append({"config-name": HAWQ_GLOBAL_RM_TYPE, "item": self.getErrorItem(message)})
+
     return self.toConfigurationValidationProblems(validationItems, "hawq-site")
   
   

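The HDP 2.3 hunk above collapses four copy-pasted validateIfRootDir calls into one dict-driven loop and adds an error when hawq_global_rm_type is set to YARN while the YARN service is absent. The table-driven shape of that check is small enough to show on its own; validate_if_root_dir below is only a stand-in for the real advisor method, and the sample hawq_site values are invented:

def validate_if_root_dir(properties, validation_items, prop_name, display_name):
    # Stand-in for the advisor's validateIfRootDir: warn when any of the
    # comma-separated directories is the filesystem root.
    dirs = properties.get(prop_name, "").split(",")
    if any(d.strip() == "/" for d in dirs):
        validation_items.append({"config-name": prop_name,
                                 "item": display_name + " should not be the root directory"})

directories = {
    'hawq_master_directory': 'HAWQ Master directory',
    'hawq_master_temp_directory': 'HAWQ Master temp directory',
    'hawq_segment_directory': 'HAWQ Segment directory',
    'hawq_segment_temp_directory': 'HAWQ Segment temp directory',
}
hawq_site = {'hawq_master_directory': '/', 'hawq_segment_directory': '/data/hawq/segment'}
validation_items = []
for property_name, display_name in directories.items():
    validate_if_root_dir(hawq_site, validation_items, property_name, display_name)
# validation_items now holds a single entry, for hawq_master_directory.
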
http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/resources/stacks/HDP/2.4/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HIVE/metainfo.xml
index ee973ed..93728fd 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HIVE/metainfo.xml
@@ -26,7 +26,7 @@
             <name>HIVE_SERVER_INTERACTIVE</name>
             <displayName>HiveServer2 Interactive</displayName>
             <category>MASTER</category>
-            <cardinality>1</cardinality>
+            <cardinality>0+</cardinality>
             <versionAdvertised>true</versionAdvertised>
             <clientsToUpdateConfigs></clientsToUpdateConfigs>
             <dependencies>
@@ -35,7 +35,7 @@
                 <scope>cluster</scope>
                 <auto-deploy>
                   <enabled>true</enabled>
-                  <co-locate>HIVE/HIVE_SERVER</co-locate>
+                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
                 </auto-deploy>
               </dependency>
               <dependency>
@@ -43,6 +43,15 @@
                 <scope>host</scope>
                 <auto-deploy>
                   <enabled>true</enabled>
+                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
+                </auto-deploy>
+              </dependency>
+              <dependency>
+                <name>HDFS/HDFS_CLIENT</name>
+                <scope>host</scope>
+                <auto-deploy>
+                  <enabled>true</enabled>
+                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
                 </auto-deploy>
               </dependency>
               <dependency>
@@ -50,6 +59,7 @@
                 <scope>host</scope>
                 <auto-deploy>
                   <enabled>true</enabled>
+                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
                 </auto-deploy>
               </dependency>
               <dependency>
@@ -57,9 +67,26 @@
                 <scope>host</scope>
                 <auto-deploy>
                   <enabled>true</enabled>
+                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
+                </auto-deploy>
+              </dependency>
+              <dependency>
+                <name>PIG/PIG</name>
+                <scope>host</scope>
+                <auto-deploy>
+                  <enabled>true</enabled>
+                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
+                </auto-deploy>
+              </dependency>
+              <dependency>
+                <name>SLIDER/SLIDER</name>
+                <scope>host</scope>
+                <auto-deploy>
+                  <enabled>true</enabled>
+                  <co-locate>HIVE/HIVE_SERVER_INTERACTIVE</co-locate>
                 </auto-deploy>
               </dependency>
-              </dependencies>
+            </dependencies>
                 <commandScript>
                   <script>scripts/hive_server_interactive.py</script>
                   <scriptType>PYTHON</scriptType>
@@ -70,6 +97,14 @@
                 </configuration-dependencies>
           </component>
         </components>
+      <requiredServices>
+        <service>ZOOKEEPER</service>
+        <service>HDFS</service>
+        <service>YARN</service>
+        <service>TEZ</service>
+        <service>PIG</service>
+        <service>SLIDER</service>
+      </requiredServices>
     </service>
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
index bc4d397..af6fb9b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
@@ -607,8 +607,8 @@ public class TestActionScheduler {
     doAnswer(new Answer<List<HostRoleCommand>>() {
       @Override
       public List<HostRoleCommand> answer(InvocationOnMock invocation) throws Throwable {
-        String role = (String) invocation.getArguments()[1];
-        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[2];
+        String role = (String) invocation.getArguments()[0];
+        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[1];
 
         HostRoleCommand task = s.getHostRoleCommand(null, role);
 
@@ -618,7 +618,7 @@ public class TestActionScheduler {
           return Collections.emptyList();
         }
       }
-    }).when(db).getTasksByHostRoleAndStatus(anyString(), anyString(), any(HostRoleStatus.class));
+    }).when(db).getTasksByRoleAndStatus(anyString(), any(HostRoleStatus.class));
 
     ServerActionExecutor.init(injector);
     ActionScheduler scheduler = new ActionScheduler(100, 50, db, aq, fsm, 3,
@@ -762,8 +762,8 @@ public class TestActionScheduler {
     doAnswer(new Answer<List<HostRoleCommand>>() {
       @Override
       public List<HostRoleCommand> answer(InvocationOnMock invocation) throws Throwable {
-        String role = (String) invocation.getArguments()[1];
-        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[2];
+        String role = (String) invocation.getArguments()[0];
+        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[1];
 
         HostRoleCommand task = s.getHostRoleCommand(null, role);
 
@@ -774,7 +774,7 @@ public class TestActionScheduler {
         }
 
       }
-    }).when(db).getTasksByHostRoleAndStatus(anyString(), anyString(), any(HostRoleStatus.class));
+    }).when(db).getTasksByRoleAndStatus(anyString(), any(HostRoleStatus.class));
 
     ServerActionExecutor.init(injector);
     ActionScheduler scheduler = new ActionScheduler(100, 50, db, aq, fsm, 3,
@@ -843,8 +843,8 @@ public class TestActionScheduler {
     doAnswer(new Answer<List<HostRoleCommand>>() {
       @Override
       public List<HostRoleCommand> answer(InvocationOnMock invocation) throws Throwable {
-        String role = (String) invocation.getArguments()[1];
-        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[2];
+        String role = (String) invocation.getArguments()[0];
+        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[1];
 
         HostRoleCommand task = s.getHostRoleCommand(null, role);
 
@@ -854,7 +854,7 @@ public class TestActionScheduler {
           return Collections.emptyList();
         }
       }
-    }).when(db).getTasksByHostRoleAndStatus(anyString(), anyString(), any(HostRoleStatus.class));
+    }).when(db).getTasksByRoleAndStatus(anyString(), any(HostRoleStatus.class));
 
     ActionScheduler scheduler = new ActionScheduler(100, 50, db, aq, fsm, 3,
         new HostsMap((String) null), unitOfWork, null, conf);
@@ -1951,8 +1951,8 @@ public class TestActionScheduler {
     doAnswer(new Answer<List<HostRoleCommand>>() {
       @Override
       public List<HostRoleCommand> answer(InvocationOnMock invocation) throws Throwable {
-        String role = (String) invocation.getArguments()[1];
-        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[2];
+        String role = (String) invocation.getArguments()[0];
+        HostRoleStatus status = (HostRoleStatus) invocation.getArguments()[1];
 
         HostRoleCommand task = s.getHostRoleCommand(null, role);
 
@@ -1962,7 +1962,7 @@ public class TestActionScheduler {
           return Collections.emptyList();
         }
       }
-    }).when(db).getTasksByHostRoleAndStatus(anyString(), anyString(), any(HostRoleStatus.class));
+    }).when(db).getTasksByRoleAndStatus(anyString(), any(HostRoleStatus.class));
 
     doAnswer(new Answer<HostRoleCommand>() {
       @Override
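
The test changes above track an API change from getTasksByHostRoleAndStatus(hostname, role, status) to getTasksByRoleAndStatus(role, status): once the host argument disappears, every stubbed Answer has to read role and status from argument positions 0 and 1 instead of 1 and 2. The same trap exists with Python's unittest.mock; the sketch below is only a Python analogue of the Java/Mockito pattern, and the db object, method name, and argument values are stand-ins:

from unittest import mock

def get_tasks_by_role_and_status(*args):
    # With the hostname parameter gone, role and status are arguments 0 and 1,
    # not 1 and 2 as they were for getTasksByHostRoleAndStatus.
    role, status = args[0], args[1]
    return ["task for %s in state %s" % (role, status)]

db = mock.Mock()
db.getTasksByRoleAndStatus.side_effect = get_tasks_by_role_and_status
print(db.getTasksByRoleAndStatus("AMBARI_SERVER_ACTION", "QUEUED"))
# -> ['task for AMBARI_SERVER_ACTION in state QUEUED']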

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
index 510e1fb..6cb9e6f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
@@ -36,6 +36,7 @@ import org.apache.ambari.server.agent.rest.AgentResource;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
 import org.apache.ambari.server.orm.DBAccessor;
+import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.security.SecurityHelperImpl;
 import org.apache.ambari.server.stack.StackManagerFactory;
@@ -308,6 +309,7 @@ public class AgentResourceTest extends RandomPortJerseyTest {
       bind(HeartBeatHandler.class).toInstance(handler);
       bind(AmbariMetaInfo.class).toInstance(ambariMetaInfo);
       bind(DBAccessor.class).toInstance(mock(DBAccessor.class));
+      bind(HostRoleCommandDAO.class).toInstance(mock(HostRoleCommandDAO.class));
     }
 
     private void installDependencies() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java b/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java
index 4e236f3..3ecb5aa 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java
@@ -563,4 +563,99 @@ public class ConfigurationTest {
     Assert.assertEquals(44, configuration.getPropertyProvidersThreadPoolMaxSize());
   }
 
+
+  @Test
+  public void testGetHostRoleCommandStatusSummaryCacheSize() throws  Exception {
+    // Given
+    final Properties ambariProperties = new Properties();
+    final Configuration configuration = new Configuration(ambariProperties);
+    ambariProperties.setProperty(Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE, "3000");
+
+    // When
+    long actualCacheSize = configuration.getHostRoleCommandStatusSummaryCacheSize();
+
+    // Then
+    Assert.assertEquals(actualCacheSize, 3000L);
+  }
+
+  @Test
+  public void testGetHostRoleCommandStatusSummaryCacheSizeDefault() throws  Exception {
+    // Given
+    final Properties ambariProperties = new Properties();
+    final Configuration configuration = new Configuration(ambariProperties);
+
+    // When
+    long actualCacheSize = configuration.getHostRoleCommandStatusSummaryCacheSize();
+
+    // Then
+    Assert.assertEquals(actualCacheSize, Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE_DEFAULT);
+  }
+
+  @Test
+  public void testGetHostRoleCommandStatusSummaryCacheExpiryDuration() throws  Exception {
+    // Given
+    final Properties ambariProperties = new Properties();
+    final Configuration configuration = new Configuration(ambariProperties);
+    ambariProperties.setProperty(Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION, "60");
+
+    // When
+    long actualCacheExpiryDuration = configuration.getHostRoleCommandStatusSummaryCacheExpiryDuration();
+
+    // Then
+    Assert.assertEquals(actualCacheExpiryDuration, 60L);
+  }
+
+  @Test
+  public void testGetHostRoleCommandStatusSummaryCacheExpiryDurationDefault() throws  Exception {
+    // Given
+    final Properties ambariProperties = new Properties();
+    final Configuration configuration = new Configuration(ambariProperties);
+
+    // When
+    long actualCacheExpiryDuration = configuration.getHostRoleCommandStatusSummaryCacheExpiryDuration();
+
+    // Then
+    Assert.assertEquals(actualCacheExpiryDuration, Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_DEFAULT);
+  }
+
+  @Test
+  public void testGetHostRoleCommandStatusSummaryCacheEnabled() throws  Exception {
+    // Given
+    final Properties ambariProperties = new Properties();
+    final Configuration configuration = new Configuration(ambariProperties);
+    ambariProperties.setProperty(Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED, "true");
+
+    // When
+    boolean actualCacheEnabledConfig = configuration.getHostRoleCommandStatusSummaryCacheEnabled();
+
+    // Then
+    Assert.assertEquals(actualCacheEnabledConfig, true);
+  }
+
+  @Test
+  public void testGetHostRoleCommandStatusSummaryCacheDisabled() throws  Exception {
+    // Given
+    final Properties ambariProperties = new Properties();
+    final Configuration configuration = new Configuration(ambariProperties);
+    ambariProperties.setProperty(Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED, "false");
+
+    // When
+    boolean actualCacheEnabledConfig = configuration.getHostRoleCommandStatusSummaryCacheEnabled();
+
+    // Then
+    Assert.assertEquals(actualCacheEnabledConfig, false);
+  }
+
+  @Test
+  public void testGetHostRoleCommandStatusSummaryCacheEnabledDefault() throws  Exception {
+    // Given
+    final Properties ambariProperties = new Properties();
+    final Configuration configuration = new Configuration(ambariProperties);
+
+    // When
+    boolean actualCacheEnabledConfig = configuration.getHostRoleCommandStatusSummaryCacheEnabled();
+
+    // Then
+    Assert.assertEquals(actualCacheEnabledConfig, Configuration.SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED_DEFAULT);
+  }
+
 }

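The new getters exercised above (cache size, expiry duration, enabled flag) describe the host role command status summary cache configuration. Purely as an illustration of how settings of this shape are commonly consumed, and not Ambari's actual implementation, a Guava-based sketch; the choice of minutes for the expiry unit and the key/value types are assumptions:

import java.util.concurrent.TimeUnit;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;

class StatusSummaryCacheSketch {
  // Builds a bounded, expiring cache from the three settings tested above;
  // returns null when caching is disabled so callers can bypass it entirely.
  static Cache<Long, Object> build(long maxSize, long expiryMinutes, boolean enabled) {
    if (!enabled) {
      return null;
    }
    return CacheBuilder.newBuilder()
        .maximumSize(maxSize)
        .expireAfterWrite(expiryMinutes, TimeUnit.MINUTES)
        .build();
  }
}
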
http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 7b26f23..992150c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -5679,6 +5679,7 @@ public class AmbariManagementControllerTest {
       clusters.getCluster(clusterName).getService(serviceName)
       .getServiceComponents().values()) {
       Assert.assertEquals(State.INSTALLED, sc.getDesiredState());
+      Assert.assertFalse(sc.isRecoveryEnabled()); // default value of recoveryEnabled
       for (ServiceComponentHost sch : sc.getServiceComponentHosts().values()) {
         Assert.assertEquals(State.INSTALLED, sch.getDesiredState());
         Assert.assertEquals(State.INIT, sch.getState());
@@ -5695,6 +5696,7 @@ public class AmbariManagementControllerTest {
     for (ServiceComponent sc :
       clusters.getCluster(clusterName).getService(serviceName)
           .getServiceComponents().values()) {
+      sc.setRecoveryEnabled(true);
       for (ServiceComponentHost sch : sc.getServiceComponentHosts().values()) {
         sch.setState(State.INSTALLED);
       }
@@ -5714,6 +5716,7 @@ public class AmbariManagementControllerTest {
       clusters.getCluster(clusterName).getService(serviceName)
           .getServiceComponents().values()) {
       Assert.assertEquals(State.INSTALLED, sc.getDesiredState());
+      Assert.assertTrue(sc.isRecoveryEnabled());
       for (ServiceComponentHost sch : sc.getServiceComponentHosts().values()) {
         Assert.assertEquals(State.INSTALLED, sch.getDesiredState());
         Assert.assertEquals(State.INSTALLED, sch.getState());

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
index 2dcde00..f6027f3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
@@ -42,6 +42,7 @@ import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.ResourceProvider;
 import org.apache.ambari.server.metadata.RoleCommandOrder;
 import org.apache.ambari.server.orm.DBAccessor;
+import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.security.credential.PrincipalKeyCredential;
 import org.apache.ambari.server.security.encryption.CredentialStoreService;
@@ -215,6 +216,7 @@ public class KerberosHelperTest extends EasyMockSupport {
         bind(CreatePrincipalsServerAction.class).toInstance(createMock(CreatePrincipalsServerAction.class));
         bind(CreateKeytabFilesServerAction.class).toInstance(createMock(CreateKeytabFilesServerAction.class));
         bind(StackAdvisorHelper.class).toInstance(createMock(StackAdvisorHelper.class));
+        bind(HostRoleCommandDAO.class).toInstance(createNiceMock(HostRoleCommandDAO.class));
       }
     });
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
index f38fab1..d24ca09 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
@@ -225,11 +225,14 @@ public class ComponentResourceProviderTest {
     expect(service.getServiceComponents()).andReturn(serviceComponentMap).anyTimes();
 
     expect(serviceComponent1.convertToResponse()).andReturn(
-      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component100", null, "", 1, 1, 0));
+      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component100", null, "", 1, 1, 0,
+              true /* recovery enabled */));
     expect(serviceComponent2.convertToResponse()).andReturn(
-      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component101", null, "", 1, 1, 0));
+      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component101", null, "", 1, 1, 0,
+              false /* recovery not enabled */));
     expect(serviceComponent3.convertToResponse()).andReturn(
-      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component102", null, "", 1, 1, 0));
+      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component102", null, "", 1, 1, 0,
+              true /* recovery enabled */));
 
     expect(ambariMetaInfo.getComponent((String) anyObject(),
         (String) anyObject(), (String) anyObject(), (String) anyObject()))
@@ -258,6 +261,7 @@ public class ComponentResourceProviderTest {
     propertyIds.add(ComponentResourceProvider.COMPONENT_TOTAL_COUNT_PROPERTY_ID);
     propertyIds.add(ComponentResourceProvider.COMPONENT_STARTED_COUNT_PROPERTY_ID);
     propertyIds.add(ComponentResourceProvider.COMPONENT_INSTALLED_COUNT_PROPERTY_ID);
+    propertyIds.add(ComponentResourceProvider.COMPONENT_RECOVERY_ENABLED_ID);
 
     Predicate predicate = new PredicateBuilder()
       .property(ComponentResourceProvider.COMPONENT_CLUSTER_NAME_PROPERTY_ID)
@@ -282,6 +286,8 @@ public class ComponentResourceProviderTest {
         ComponentResourceProvider.COMPONENT_STARTED_COUNT_PROPERTY_ID));
       Assert.assertEquals(0, resource.getPropertyValue(
         ComponentResourceProvider.COMPONENT_INSTALLED_COUNT_PROPERTY_ID));
+      Assert.assertEquals(String.valueOf(true), resource.getPropertyValue(
+        ComponentResourceProvider.COMPONENT_RECOVERY_ENABLED_ID));
     }
 
     // verify
@@ -364,11 +370,14 @@ public class ComponentResourceProviderTest {
     expect(component3Info.getCategory()).andReturn(null);
 
     expect(serviceComponent1.convertToResponse()).andReturn(
-      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component101", null, "", 1, 0, 1));
+      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component101", null, "", 1, 0, 1,
+              false /* recovery not enabled */));
     expect(serviceComponent2.convertToResponse()).andReturn(
-      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component102", null, "", 1, 0, 1));
+      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component102", null, "", 1, 0, 1,
+              false /* recovery not enabled */));
     expect(serviceComponent3.convertToResponse()).andReturn(
-      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component103", null, "", 1, 0, 1));
+      new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component103", null, "", 1, 0, 1,
+              false /* recovery not enabled */));
     expect(serviceComponent1.getDesiredState()).andReturn(State.INSTALLED).anyTimes();
     expect(serviceComponent2.getDesiredState()).andReturn(State.INSTALLED).anyTimes();
     expect(serviceComponent3.getDesiredState()).andReturn(State.INSTALLED).anyTimes();
@@ -412,6 +421,7 @@ public class ComponentResourceProviderTest {
 
     Map<String, Object> properties = new LinkedHashMap<String, Object>();
 
+    properties.put(ComponentResourceProvider.COMPONENT_RECOVERY_ENABLED_ID, String.valueOf(true) /* recovery enabled */);
     properties.put(ComponentResourceProvider.COMPONENT_STATE_PROPERTY_ID, "STARTED");
     properties.put(ComponentResourceProvider.COMPONENT_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
 
@@ -607,7 +617,7 @@ public class ComponentResourceProviderTest {
 
     // requests
     ServiceComponentRequest request1 = new ServiceComponentRequest("cluster1", "service1", "component1",
-        null);
+        null, String.valueOf(true /* recovery enabled */));
 
     Set<ServiceComponentRequest> setRequests = new HashSet<ServiceComponentRequest>();
     setRequests.add(request1);
@@ -667,14 +677,15 @@ public class ComponentResourceProviderTest {
 
     // requests
     ServiceComponentRequest request1 = new ServiceComponentRequest("cluster1", "service1", "component1",
-        null);
+        null, String.valueOf(true /* recovery enabled */));
     ServiceComponentRequest request2 = new ServiceComponentRequest("cluster1", "service1", "component2",
-        null);
+        null, String.valueOf(true /* recovery enabled */));
     ServiceComponentRequest request3 = new ServiceComponentRequest("cluster1", "service1", "component3",
-        null);
+        null, String.valueOf(true /* recovery enabled */));
     ServiceComponentRequest request4 = new ServiceComponentRequest("cluster1", "service1", "component4",
-        null);
-    ServiceComponentRequest request5 = new ServiceComponentRequest("cluster1", "service2", null, null);
+        null, String.valueOf(true /* recovery enabled */));
+    ServiceComponentRequest request5 = new ServiceComponentRequest("cluster1", "service2", null, null,
+              String.valueOf(true /* recovery enabled */));
 
     Set<ServiceComponentRequest> setRequests = new HashSet<ServiceComponentRequest>();
     setRequests.add(request1);
@@ -758,7 +769,7 @@ public class ComponentResourceProviderTest {
 
     // requests
     ServiceComponentRequest request1 = new ServiceComponentRequest("cluster1", "service1", "component1",
-        null);
+        null, String.valueOf(true /* recovery enabled */));
 
     Set<ServiceComponentRequest> setRequests = new HashSet<ServiceComponentRequest>();
     setRequests.add(request1);

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java
index 455652b..d7a15e2 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java
@@ -31,6 +31,7 @@ import static org.junit.Assert.assertTrue;
 import java.io.File;
 import java.io.FileReader;
 import java.lang.reflect.Type;
+import java.net.URL;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -62,6 +63,7 @@ import org.apache.commons.lang.StringUtils;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
+import org.springframework.util.Assert;
 
 /**
  * StackManager unit tests.
@@ -641,12 +643,16 @@ public class StackManagerTest {
         stack.getKerberosDescriptorFileLocation());
   }
 
-  @Ignore
   @Test
   public void testMetricsLoaded() throws Exception {
 
-    String stackRoot = ClassLoader.getSystemClassLoader().getResource("stacks").getPath().replace("test-classes","classes");
-    String commonServices = ClassLoader.getSystemClassLoader().getResource("common-services").getPath().replace("test-classes","classes");
+    URL rootDirectoryURL = StackManagerTest.class.getResource("/");
+    Assert.notNull(rootDirectoryURL);
+
+    File resourcesDirectory = new File(new File(rootDirectoryURL.getFile()).getParentFile().getParentFile(), "src/main/resources");
+
+    File stackRoot = new File(resourcesDirectory, "stacks");
+    File commonServices = new File(resourcesDirectory, "common-services");
 
     MetainfoDAO metaInfoDao = createNiceMock(MetainfoDAO.class);
     StackDAO stackDao = createNiceMock(StackDAO.class);
@@ -660,7 +666,7 @@ public class StackManagerTest {
 
     OsFamily osFamily = new OsFamily(config);
 
-    StackManager stackManager = new StackManager(new File(stackRoot), new File(commonServices),
+    StackManager stackManager = new StackManager(stackRoot, commonServices,
             osFamily, metaInfoDao, actionMetadata, stackDao);
 
     for (StackInfo stackInfo : stackManager.getStacks()) {
@@ -682,12 +688,15 @@ public class StackManagerTest {
     }
   }
 
-  @Ignore
   @Test
   public void testServicesWithRangerPluginRoleCommandOrder() throws AmbariException {
-    // Given
-    String stackRoot = ClassLoader.getSystemClassLoader().getResource("stacks").getPath().replace("test-classes","classes");
-    String commonServices = ClassLoader.getSystemClassLoader().getResource("common-services").getPath().replace("test-classes","classes");
+    URL rootDirectoryURL = StackManagerTest.class.getResource("/");
+    Assert.notNull(rootDirectoryURL);
+
+    File resourcesDirectory = new File(new File(rootDirectoryURL.getFile()).getParentFile().getParentFile(), "src/main/resources");
+
+    File stackRoot = new File(resourcesDirectory, "stacks");
+    File commonServices = new File(resourcesDirectory, "common-services");
 
     MetainfoDAO metaInfoDao = createNiceMock(MetainfoDAO.class);
     StackDAO stackDao = createNiceMock(StackDAO.class);
@@ -701,7 +710,7 @@ public class StackManagerTest {
 
     OsFamily osFamily = new OsFamily(config);
 
-    StackManager stackManager = new StackManager(new File(stackRoot), new File(commonServices), osFamily, metaInfoDao, actionMetadata, stackDao);
+    StackManager stackManager = new StackManager(stackRoot, commonServices, osFamily, metaInfoDao, actionMetadata, stackDao);
 
     String rangerUserSyncRoleCommand = Role.RANGER_USERSYNC + "-" + RoleCommand.START;
     String rangerAdminRoleCommand = Role.RANGER_ADMIN + "-" + RoleCommand.START;
@@ -783,14 +792,6 @@ public class StackManagerTest {
 
     assertTrue(rangerUserSyncRoleCommand + " should be dependent of " + rangerAdminRoleCommand, rangerUserSyncBlockers.contains(rangerAdminRoleCommand));
     assertTrue(rangerUserSyncRoleCommand + " should be dependent of " + kmsRoleCommand, rangerUserSyncBlockers.contains(kmsRoleCommand));
-
-    // Zookeeper Server
-    ArrayList<String> zookeeperBlockers = (ArrayList<String>)generalDeps.get(zookeeperServerRoleCommand);
-
-    assertTrue(zookeeperServerRoleCommand + " should be dependent of " + rangerUserSyncRoleCommand, zookeeperBlockers.contains(rangerUserSyncRoleCommand));
-
   }
-
-
   //todo: component override assertions
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
index 98424b7..9fe0fc3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
@@ -48,6 +48,7 @@ import org.apache.ambari.server.controller.spi.ClusterController;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.security.TestAuthenticationFactory;
 import org.apache.ambari.server.stack.StackManagerFactory;
@@ -754,6 +755,7 @@ public class ConfigHelperTest {
           bind(Clusters.class).toInstance(createNiceMock(ClustersImpl.class));
           bind(ClusterController.class).toInstance(clusterController);
           bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));
+          bind(HostRoleCommandDAO.class).toInstance(createNiceMock(HostRoleCommandDAO.class));
         }
       });
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
index 6061e06..077df33 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
@@ -19,12 +19,12 @@
 package org.apache.ambari.server.upgrade;
 
 
-import com.google.common.collect.Maps;
-import com.google.gson.Gson;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.Provider;
-import com.google.inject.persist.PersistService;
+import java.lang.reflect.Method;
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.persistence.EntityManager;
+
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.controller.AmbariManagementController;
@@ -33,6 +33,7 @@ import org.apache.ambari.server.controller.ConfigurationRequest;
 import org.apache.ambari.server.controller.ConfigurationResponse;
 import org.apache.ambari.server.controller.KerberosHelper;
 import org.apache.ambari.server.controller.MaintenanceStateHelper;
+import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.dao.StackDAO;
@@ -40,6 +41,7 @@ import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.stack.OsFamily;
 import org.easymock.Capture;
 import org.easymock.EasyMock;
 import org.easymock.EasyMockSupport;
@@ -47,10 +49,14 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import javax.persistence.EntityManager;
-import java.lang.reflect.Method;
-import java.util.HashMap;
-import java.util.Map;
+import com.google.common.collect.Maps;
+import com.google.gson.Gson;
+import com.google.inject.Binder;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Module;
+import com.google.inject.Provider;
+import com.google.inject.persist.PersistService;
 
 import static org.easymock.EasyMock.anyObject;
 import static org.easymock.EasyMock.anyString;
@@ -58,6 +64,7 @@ import static org.easymock.EasyMock.capture;
 import static org.easymock.EasyMock.createMockBuilder;
 import static org.easymock.EasyMock.createNiceMock;
 import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.eq;
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.expectLastCall;
 import static org.easymock.EasyMock.replay;
@@ -101,6 +108,7 @@ public class UpgradeCatalog222Test {
     Method updateAlerts = UpgradeCatalog222.class.getDeclaredMethod("updateAlerts");
     Method updateStormConfigs = UpgradeCatalog222.class.getDeclaredMethod("updateStormConfigs");
     Method updateAMSConfigs = UpgradeCatalog222.class.getDeclaredMethod("updateAMSConfigs");
+    Method updateHostRoleCommands = UpgradeCatalog222.class.getDeclaredMethod("updateHostRoleCommands");
 
 
     UpgradeCatalog222 upgradeCatalog222 = createMockBuilder(UpgradeCatalog222.class)
@@ -108,6 +116,7 @@ public class UpgradeCatalog222Test {
             .addMockedMethod(updateAlerts)
             .addMockedMethod(updateStormConfigs)
             .addMockedMethod(updateAMSConfigs)
+            .addMockedMethod(updateHostRoleCommands)
             .createMock();
 
     upgradeCatalog222.addNewConfigurationsFromXml();
@@ -118,6 +127,8 @@ public class UpgradeCatalog222Test {
     expectLastCall().once();
     upgradeCatalog222.updateAMSConfigs();
     expectLastCall().once();
+    upgradeCatalog222.updateHostRoleCommands();
+    expectLastCall().once();
 
     replay(upgradeCatalog222);
 
@@ -203,4 +214,28 @@ public class UpgradeCatalog222Test {
 
   }
 
+  @Test
+  public void testUpdateHostRoleCommands() throws Exception {
+    final DBAccessor dbAccessor = createNiceMock(DBAccessor.class);
+    dbAccessor.createIndex(eq("idx_hrc_status"), eq("host_role_command"), eq("status"), eq("role"));
+    expectLastCall().once();
+
+    replay(dbAccessor);
+
+    Module module = new Module() {
+      @Override
+      public void configure(Binder binder) {
+        binder.bind(DBAccessor.class).toInstance(dbAccessor);
+        binder.bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+      }
+    };
+
+    Injector injector = Guice.createInjector(module);
+    UpgradeCatalog222 upgradeCatalog222 = injector.getInstance(UpgradeCatalog222.class);
+    upgradeCatalog222.updateHostRoleCommands();
+
+
+    verify(dbAccessor);
+  }
+
 }

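The expectation above pins down the exact index DDL the 2.2.2 upgrade should issue. Inferred solely from that expectation, the production-side method would look roughly like the sketch below; the wrapper class is hypothetical and the real UpgradeCatalog222.updateHostRoleCommands() may differ in details such as exception handling:

import java.sql.SQLException;

import org.apache.ambari.server.orm.DBAccessor;

class UpdateHostRoleCommandsSketch {
  private final DBAccessor dbAccessor;

  UpdateHostRoleCommandsSketch(DBAccessor dbAccessor) {
    this.dbAccessor = dbAccessor;
  }

  // Adds the status/role index on host_role_command that the test verifies.
  void updateHostRoleCommands() throws SQLException {
    dbAccessor.createIndex("idx_hrc_status", "host_role_command", "status", "role");
  }
}
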
http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
index 95ae8d8..f5fafbc 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
@@ -103,6 +103,7 @@ public class UpgradeCatalog240Test {
   @Test
   public void testExecuteDDLUpdates() throws SQLException, AmbariException {
     Capture<DBAccessor.DBColumnInfo> capturedColumnInfo = newCapture();
+    Capture<DBAccessor.DBColumnInfo> capturedScColumnInfo = newCapture();
     final DBAccessor dbAccessor = createStrictMock(DBAccessor.class);
     Configuration configuration = createNiceMock(Configuration.class);
     Connection connection = createNiceMock(Connection.class);
@@ -111,6 +112,8 @@ public class UpgradeCatalog240Test {
     Capture<List<DBAccessor.DBColumnInfo>> capturedSettingColumns = EasyMock.newCapture();
 
     dbAccessor.addColumn(eq("adminpermission"), capture(capturedColumnInfo));
+    dbAccessor.addColumn(eq(UpgradeCatalog240.SERVICE_COMPONENT_DESIRED_STATE_TABLE), capture(capturedScColumnInfo));
+
     dbAccessor.createTable(eq("setting"), capture(capturedSettingColumns), eq("id"));
     expect(configuration.getDatabaseUrl()).andReturn(Configuration.JDBC_IN_MEMORY_URL).anyTimes();
     expect(dbAccessor.getConnection()).andReturn(connection);
@@ -176,6 +179,15 @@ public class UpgradeCatalog240Test {
     Assert.assertEquals(1, columnInfo.getDefaultValue());
     Assert.assertEquals(false, columnInfo.isNullable());
 
+    // Verify if recovery_enabled column was added to servicecomponentdesiredstate table
+    DBAccessor.DBColumnInfo columnScInfo = capturedScColumnInfo.getValue();
+    Assert.assertNotNull(columnScInfo);
+    Assert.assertEquals(UpgradeCatalog240.RECOVERY_ENABLED_COL, columnScInfo.getName());
+    Assert.assertEquals(null, columnScInfo.getLength());
+    Assert.assertEquals(Short.class, columnScInfo.getType());
+    Assert.assertEquals(0, columnScInfo.getDefaultValue());
+    Assert.assertEquals(false, columnScInfo.isNullable());
+
     Map<String, Class> expectedCaptures = new HashMap<>();
     expectedCaptures.put("id", Long.class);
     expectedCaptures.put("name", String.class);

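The captured column assertions above translate into an addColumn call of roughly the following shape. The five-argument DBColumnInfo order (name, type, length, default, nullable) and the literal table/column names are read off the assertions and the diff comment; the wrapper class is illustrative only:

import java.sql.SQLException;

import org.apache.ambari.server.orm.DBAccessor;

class AddRecoveryEnabledColumnSketch {
  // Adds the non-nullable, default-0 SHORT column recovery_enabled to
  // servicecomponentdesiredstate, matching what the capture above asserts.
  static void addRecoveryEnabledColumn(DBAccessor dbAccessor) throws SQLException {
    dbAccessor.addColumn("servicecomponentdesiredstate",
        new DBAccessor.DBColumnInfo("recovery_enabled", Short.class, null, 0, false));
  }
}
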
http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java b/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
index 854263c..215d137 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
@@ -37,6 +37,7 @@ import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.dao.HostDAO;
+import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.security.encryption.CredentialStoreService;
 import org.apache.ambari.server.stack.StackManagerFactory;
@@ -118,6 +119,7 @@ public class StageUtilsTest extends EasyMockSupport {
         bind(HostRoleCommandFactory.class).to(HostRoleCommandFactoryImpl.class);
         bind(HostDAO.class).toInstance(createNiceMock(HostDAO.class));
         bind(PersistedState.class).toInstance(createNiceMock(PersistedState.class));
+        bind(HostRoleCommandDAO.class).toInstance(createNiceMock(HostRoleCommandDAO.class));
       }
     });
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
index 7c578f2..e15582e 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
@@ -1820,7 +1820,7 @@ class TestHDP206StackAdvisor(TestCase):
     )
     recommendedDefaults = {"property1": "file:///grid/0/var/dir"}
     warn = self.stackAdvisor.validatorNotRootFs(properties, recommendedDefaults, 'property1', hostInfo)
-    self.assertIsNotNone(warn)
+    self.assertTrue(warn != None)
     self.assertEquals({'message': 'It is not recommended to use root partition for property1', 'level': 'WARN'}, warn)
 
     # Set by user /var mountpoint, which is non-root , but not preferable - no warning
@@ -1831,7 +1831,7 @@ class TestHDP206StackAdvisor(TestCase):
         "mountpoint" : "/var"
       }
     )
-    self.assertIsNone(self.stackAdvisor.validatorNotRootFs(properties, recommendedDefaults, 'property1', hostInfo))
+    self.assertTrue(self.stackAdvisor.validatorNotRootFs(properties, recommendedDefaults, 'property1', hostInfo) == None)
 
   def test_validatorEnoughDiskSpace(self):
     reqiuredDiskSpace = 1048576
@@ -1847,7 +1847,7 @@ class TestHDP206StackAdvisor(TestCase):
       }
     ]}
     properties = {"property1": "file:///var/dir"}
-    self.assertIsNone(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace))
+    self.assertTrue(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace) == None)
 
     # local FS, no enough space
     hostInfo = {"disk_info": [
@@ -1858,16 +1858,16 @@ class TestHDP206StackAdvisor(TestCase):
       }
     ]}
     warn = self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace)
-    self.assertIsNotNone(warn)
+    self.assertTrue(warn != None)
     self.assertEquals({'message': errorMsg, 'level': 'WARN'}, warn)
 
     # non-local FS, HDFS
     properties = {"property1": "hdfs://h1"}
-    self.assertIsNone(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace))
+    self.assertTrue(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace) == None)
 
     # non-local FS, WASB
     properties = {"property1": "wasb://h1"}
-    self.assertIsNone(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace))
+    self.assertTrue(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace) == None)
 
   def test_round_to_n(self):
     self.assertEquals(self.stack_advisor_impl.round_to_n(0), 0)

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
index 14a28d3..d230030 100644
--- a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
@@ -1028,7 +1028,7 @@ class TestHDP22StackAdvisor(TestCase):
       'hive-site': {
         'properties': {
           'hive.server2.enable.doAs': 'true',
-          'hive.server2.tez.default.queues': "default",
+          'hive.server2.tez.default.queues': "queue1,queue2",
           'hive.server2.tez.initialize.default.sessions': 'false',
           'hive.server2.tez.sessions.per.default.queue': '1',
           'hive.auto.convert.join.noconditionaltask.size': '268435456',
@@ -1073,7 +1073,16 @@ class TestHDP22StackAdvisor(TestCase):
          'hive.server2.authentication.kerberos.keytab': {'delete': 'true'},
          'hive.server2.authentication.ldap.url': {'delete': 'true'},
          'hive.server2.tez.default.queues': {
-           'entries': [{'value': 'default', 'label': 'default queue'}]
+           "entries": [
+             {
+               "value": "queue1",
+               "label": "queue1 queue"
+             },
+             {
+               "value": "queue2",
+               "label": "queue2 queue"
+             }
+           ]
           }
         }
       },
@@ -2052,6 +2061,7 @@ class TestHDP22StackAdvisor(TestCase):
           "timeline.metrics.cluster.aggregate.splitpoints": " ",
           "timeline.metrics.host.aggregate.splitpoints": " ",
           "timeline.metrics.host.aggregator.ttl": "1",
+          "timeline.metrics.service.handler.thread.count": "20",
           'timeline.metrics.service.watcher.disabled': 'false'
         }
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
index 545a2b5..03ae6cc 100644
--- a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
@@ -1630,6 +1630,7 @@ class TestHDP23StackAdvisor(TestCase):
   def test_validateHAWQConfigurations(self):
     services = self.load_json("services-hawq-3-hosts.json")
     # setup default configuration values
+    # Test hawq_rm_yarn_address and hawq_rm_scheduler_address are set correctly
     configurations = services["configurations"]
     configurations["hawq-site"] = {"properties": {"hawq_rm_yarn_address": "localhost:8032",
                                                   "hawq_rm_yarn_scheduler_address": "localhost:8030"}}
@@ -1664,3 +1665,48 @@ class TestHDP23StackAdvisor(TestCase):
     self.assertEqual(len(problems), 2)
     self.assertEqual(problems_dict, expected_warnings)
 
+    # Test hawq_global_rm_type validation
+    services = {
+                 "services" : [
+                   {
+                     "StackServices" : {
+                     "service_name" : "HAWQ"
+                     },
+                     "components": []
+                   } ],
+                 "configurations":
+                   {
+                     "hawq-site": {
+                       "properties": {
+                         "hawq_global_rm_type": "yarn"
+                       }
+                     }
+                   }
+                }
+    properties = services["configurations"]["hawq-site"]["properties"]
+
+    # case 1: hawq_global_rm_type is set as yarn, but YARN service is not installed. Validation error expected.
+    """
+    Validation error expected is as below:
+                    [ {
+                          "config-type": "hawq-site",
+                          "message": "hawq_global_rm_type must be set to none if YARN service is not installed",
+                          "type": "configuration",
+                          "config-name": "hawq_global_rm_type",
+                          "level": "ERROR"
+                    } ]
+    """
+    problems = self.stackAdvisor.validateHAWQConfigurations(properties, defaults, services["configurations"], services, hosts)
+    self.assertEqual(len(problems), 1)
+    self.assertEqual(problems[0]["config-type"], "hawq-site")
+    self.assertEqual(problems[0]["message"], "hawq_global_rm_type must be set to none if YARN service is not installed")
+    self.assertEqual(problems[0]["type"], "configuration")
+    self.assertEqual(problems[0]["config-name"], "hawq_global_rm_type")
+    self.assertEqual(problems[0]["level"], "ERROR")
+
+
+    # case 2: hawq_global_rm_type is set as yarn, and YARN service is installed. No validation errors expected.
+    services["services"].append({"StackServices" : {"service_name" : "YARN"}, "components":[]})
+
+    problems = self.stackAdvisor.validateHAWQConfigurations(properties, defaults, services["configurations"], services, hosts)
+    self.assertEqual(len(problems), 0)

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-web/app/assets/test/tests.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/test/tests.js b/ambari-web/app/assets/test/tests.js
index 06c4c31..4edcc5e 100644
--- a/ambari-web/app/assets/test/tests.js
+++ b/ambari-web/app/assets/test/tests.js
@@ -239,6 +239,7 @@ var files = [
   'test/views/main/dashboard/widgets/uptime_text_widget_test',
   'test/views/main/dashboard/widgets/node_managers_live_test',
   'test/views/main/dashboard/widgets/datanode_live_test',
+  'test/views/main/dashboard/widgets/hawqsegment_live_test',
   'test/views/main/dashboard/widgets/hbase_average_load_test',
   'test/views/main/dashboard/widgets/hbase_regions_in_transition_test',
   'test/views/main/dashboard/widgets/namenode_rpc_test',

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-web/app/controllers/main/admin/highAvailability/hawq/addStandby/step3_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/highAvailability/hawq/addStandby/step3_controller.js b/ambari-web/app/controllers/main/admin/highAvailability/hawq/addStandby/step3_controller.js
index 513a519..fd28ad5 100644
--- a/ambari-web/app/controllers/main/admin/highAvailability/hawq/addStandby/step3_controller.js
+++ b/ambari-web/app/controllers/main/admin/highAvailability/hawq/addStandby/step3_controller.js
@@ -141,7 +141,7 @@ App.AddHawqStandbyWizardStep3Controller = Em.Controller.extend({
   submit: function () {
     if (!this.get('isSubmitDisabled')) {
       dataDir = this.get('hawqProps').items[0].properties['hawq_master_directory'];
-      hawqStandby = this.get('hawqProps').items[0].properties['hawq_standby_address_host']
+      hawqStandby = this.get('content.hawqHosts.newHawqStandby');
       App.showConfirmationPopup(
         function() {
           App.get('router.mainAdminKerberosController').getKDCSessionState(function() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-web/app/mappers/components_state_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/components_state_mapper.js b/ambari-web/app/mappers/components_state_mapper.js
index 0f2b627..ac3e1b5 100644
--- a/ambari-web/app/mappers/components_state_mapper.js
+++ b/ambari-web/app/mappers/components_state_mapper.js
@@ -59,6 +59,11 @@ App.componentsStateMapper = App.QuickDataMapper.create({
       node_managers_installed: 'INSTALLED_PATH',
       node_managers_total: 'TOTAL_PATH'
     },
+    'HAWQSEGMENT': {
+      hawq_segments_started: 'STARTED_PATH',
+      hawq_segments_installed: 'INSTALLED_PATH',
+      hawq_segments_total: 'TOTAL_PATH'
+    },
     'HBASE_REGIONSERVER': {
       region_servers_started: 'STARTED_PATH',
       region_servers_installed: 'INSTALLED_PATH',

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 1246a5c..95f87f8 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -2532,6 +2532,7 @@ Em.I18n.translations = {
   'dashboard.widgets.YARNLinks': 'YARN Links',
   'dashboard.widgets.error.invalid': 'Invalid! Enter a number between 0 - {0}',
   'dashboard.widgets.error.smaller': 'Threshold 1 should be smaller than threshold 2!',
+  'dashboard.widgets.HawqSegmentUp': 'HAWQ Segments Live',
 
   'dashboard': {
     'widgets': {
@@ -2636,6 +2637,10 @@ Em.I18n.translations = {
   'dashboard.services.hbase.masterStarted':'Master Started',
   'dashboard.services.hbase.masterActivated':'Master Activated',
 
+  'dashboard.services.hawq.segments.started':'started',
+  'dashboard.services.hawq.segments.stopped':'stopped',
+  'dashboard.services.hawq.segments.total':'in total',
+
   'dashboard.services.hive.clients':'Hive Clients',
   'dashboard.services.hive.client':'Hive Client',
   'dashboard.services.hive.metastore':'Hive Metastore',

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-web/app/models/alerts/alert_definition.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/alerts/alert_definition.js b/ambari-web/app/models/alerts/alert_definition.js
index e91bd4f..6c25f7e 100644
--- a/ambari-web/app/models/alerts/alert_definition.js
+++ b/ambari-web/app/models/alerts/alert_definition.js
@@ -41,8 +41,8 @@ App.AlertDefinition = DS.Model.extend({
   groups: DS.hasMany('App.AlertGroup'),
   reporting: DS.hasMany('App.AlertReportDefinition'),
   parameters: DS.hasMany('App.AlertDefinitionParameter'),
-  lastTriggered: DS.attr('number'),
-  lastTriggeredRaw: DS.attr('number'),
+  lastTriggered: 0,
+  lastTriggeredRaw: 0,
 
   //relates only to SCRIPT-type alert definition
   location: DS.attr('string'),

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-web/app/views.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views.js b/ambari-web/app/views.js
index 4b6b857..18b43a8 100644
--- a/ambari-web/app/views.js
+++ b/ambari-web/app/views.js
@@ -220,6 +220,7 @@ require('views/main/dashboard/widgets/namenode_heap');
 require('views/main/dashboard/widgets/namenode_cpu');
 require('views/main/dashboard/widgets/hdfs_capacity');
 require('views/main/dashboard/widgets/datanode_live');
+require('views/main/dashboard/widgets/hawqsegment_live');
 require('views/main/dashboard/widgets/namenode_rpc');
 require('views/main/dashboard/widgets/metrics_memory');
 require('views/main/dashboard/widgets/metrics_network');

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-web/app/views/common/configs/widgets/list_config_widget_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/configs/widgets/list_config_widget_view.js b/ambari-web/app/views/common/configs/widgets/list_config_widget_view.js
index fcf2aac..568c405 100644
--- a/ambari-web/app/views/common/configs/widgets/list_config_widget_view.js
+++ b/ambari-web/app/views/common/configs/widgets/list_config_widget_view.js
@@ -288,16 +288,15 @@ App.ListConfigWidgetView = App.ConfigWidgetView.extend({
   },
 
   isOptionExist: function(value) {
-    var isExist = false;
-    if (value !== null && value !== undefined) {
+    var isExist = true;
+    if (Em.isNone(value)) {
+      return !isExist;
+    } else {
       value = Em.typeOf(value) == 'string' ? value.split(',') : value;
       value.forEach(function(item) {
-        isExist = this.get('options').mapProperty('value').contains(item);
+        isExist = isExist && this.get('options').mapProperty('value').contains(item);
       }, this);
       return isExist;
-    } else {
-      return false;
     }
   }
-
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-web/app/views/main/dashboard/widgets.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/widgets.js b/ambari-web/app/views/main/dashboard/widgets.js
index a2fb281..8a86af6 100644
--- a/ambari-web/app/views/main/dashboard/widgets.js
+++ b/ambari-web/app/views/main/dashboard/widgets.js
@@ -128,7 +128,8 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
       '13', '12', '14', '16', //hbase
       '17', '18', '19', '20', '23', // all yarn
       '21', // storm
-      '22' // flume
+      '22', // flume
+      '24' // hawq
     ]; // all in order
     var hiddenFull = [
       ['15', 'Region In Transition']
@@ -173,6 +174,12 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
         visibleFull = visibleFull.without(item);
       }, this);
     }
+    if (this.get('hawq_model') == null) {
+      var hawq = ['24'];
+      hawq.forEach(function (item) {
+        visibleFull = visibleFull.without(item);
+      }, this);
+    }
     var obj = this.get('initPrefObject');
     obj.set('visible', visibleFull);
     obj.set('hidden', hiddenFull);
@@ -192,6 +199,8 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
 
   flume_model: null,
 
+  hawq_model: null,
+
   /**
    * List of visible widgets
    * @type {Ember.Enumerable}
@@ -383,7 +392,8 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
       hbase_model: ['12', '13', '14', '15', '16'],
       yarn_model: ['17', '18', '19', '20', '23'],
       storm_model: ['21'],
-      flume_model: ['22']
+      flume_model: ['22'],
+      hawq_model: ['24']
     };
 
     // check each service, find out the newly added service and already deleted service
@@ -450,7 +460,8 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
       '20': App.YARNMemoryPieChartView,
       '21': App.SuperVisorUpView,
       '22': App.FlumeAgentUpView,
-      '23': App.YARNLinksView
+      '23': App.YARNLinksView,
+      '24': App.HawqSegmentUpView
     }, id);
   },
 
@@ -467,7 +478,7 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
     visible: [],
     hidden: [],
     threshold: {1: [80, 90], 2: [85, 95], 3: [90, 95], 4: [80, 90], 5: [1000, 3000], 6: [], 7: [], 8: [], 9: [], 10: [], 11: [], 12: [], 13: [70, 90], 14: [150, 250], 15: [3, 10], 16: [],
-      17: [70, 90], 18: [], 19: [50, 75], 20: [50, 75], 21: [85, 95], 22: [85, 95], 23: []} // id:[thresh1, thresh2]
+      17: [70, 90], 18: [], 19: [50, 75], 20: [50, 75], 21: [85, 95], 22: [85, 95], 23: [], 24: [80, 90]} // id:[thresh1, thresh2]
   }),
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-web/app/views/main/dashboard/widgets/hawqsegment_live.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/widgets/hawqsegment_live.js b/ambari-web/app/views/main/dashboard/widgets/hawqsegment_live.js
new file mode 100644
index 0000000..e8d0656
--- /dev/null
+++ b/ambari-web/app/views/main/dashboard/widgets/hawqsegment_live.js
@@ -0,0 +1,190 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var App = require('app');
+
+App.HawqSegmentUpView = App.TextDashboardWidgetView.extend({
+
+  title: Em.I18n.t('dashboard.widgets.HawqSegmentUp'),
+  id: '24',
+
+  isPieChart: false,
+  isText: true,
+  isProgressBar: false,
+  model_type: 'hawq',
+
+  hiddenInfo: function () {
+    var result = [];
+    result.pushObject(this.get('hawqSegmentsStarted') + ' ' + Em.I18n.t('dashboard.services.hawq.segments.started'));
+    result.pushObject(this.get('hawqSegmentsInstalled') + ' ' + Em.I18n.t('dashboard.services.hawq.segments.stopped'));
+    result.pushObject(this.get('hawqSegmentsTotal')+ ' ' + Em.I18n.t('dashboard.services.hawq.segments.total'));
+    return result;
+  }.property('hawqSegmentsStarted', 'hawqSegmentsInstalled', 'hawqSegmentsTotal'),
+  hiddenInfoClass: "hidden-info-three-line",
+
+  thresh1: 40,
+  thresh2: 70,
+  maxValue: 100,
+
+  hawqSegmentsStarted: function () {
+    if (Em.isNone(this.get('model.hawqSegmentsStarted'))) {
+      return Em.I18n.t('services.service.summary.notAvailable');
+    }
+    return this.get('model.hawqSegmentsStarted');
+  }.property('model.hawqSegmentsStarted'),
+
+  hawqSegmentsInstalled: function () {
+    if (Em.isNone(this.get('model.hawqSegmentsInstalled'))) {
+      return Em.I18n.t('services.service.summary.notAvailable');
+    }
+    return this.get('model.hawqSegmentsInstalled');
+  }.property('model.hawqSegmentsInstalled'),
+
+  hawqSegmentsTotal: function () {
+    if (Em.isNone(this.get('model.hawqSegmentsTotal'))) {
+      return Em.I18n.t('services.service.summary.notAvailable');
+    }
+    return this.get('model.hawqSegmentsTotal');
+  }.property('model.hawqSegmentsTotal'),
+
+  data: function () {
+    if (Em.isNone(this.get('model.hawqSegmentsStarted')) || Em.isNone(this.get('model.hawqSegmentsTotal'))) {
+      return null;
+    } else {
+      return ((this.get('hawqSegmentsStarted') / this.get('model.hawqSegmentsTotal')).toFixed(2)) * 100;
+    }
+  }.property('model.hawqSegmentsTotal', 'hawqSegmentsStarted'),
+
+  content: function () {
+    if (Em.isNone(this.get('model.hawqSegmentsStarted')) || Em.isNone(this.get('model.hawqSegmentsTotal'))) {
+      return Em.I18n.t('services.service.summary.notAvailable');
+    } else {
+      return this.get('hawqSegmentsStarted') + "/" + this.get('model.hawqSegmentsTotal');
+    }
+  }.property('model.hawqSegmentsTotal', 'hawqSegmentsStarted'),
+
+  editWidget: function (event) {
+    var parent = this;
+    var max_tmp =  parseFloat(parent.get('maxValue'));
+    var configObj = Ember.Object.create({
+      thresh1: parent.get('thresh1') + '',
+      thresh2: parent.get('thresh2') + '',
+      hintInfo: Em.I18n.t('dashboard.widgets.hintInfo.hint1').format(max_tmp),
+      isThresh1Error: false,
+      isThresh2Error: false,
+      errorMessage1: "",
+      errorMessage2: "",
+      maxValue: max_tmp,
+      observeNewThresholdValue: function () {
+        var thresh1 = this.get('thresh1');
+        var thresh2 = this.get('thresh2');
+        if (thresh1.trim() != "") {
+          if (isNaN(thresh1) || thresh1 > max_tmp || thresh1 < 0){
+            this.set('isThresh1Error', true);
+            this.set('errorMessage1', 'Invalid! Enter a number between 0 - ' + max_tmp);
+          } else if ( this.get('isThresh2Error') === false && parseFloat(thresh2)<= parseFloat(thresh1)) {
+            this.set('isThresh1Error', true);
+            this.set('errorMessage1', 'Threshold 1 should be smaller than threshold 2 !');
+          } else {
+            this.set('isThresh1Error', false);
+            this.set('errorMessage1', '');
+          }
+        } else {
+          this.set('isThresh1Error', true);
+          this.set('errorMessage1', 'This is required');
+        }
+
+        if (thresh2.trim() != "") {
+          if (isNaN(thresh2) || thresh2 > max_tmp || thresh2 < 0) {
+            this.set('isThresh2Error', true);
+            this.set('errorMessage2', 'Invalid! Enter a number between 0 - ' + max_tmp);
+          } else {
+            this.set('isThresh2Error', false);
+            this.set('errorMessage2', '');
+          }
+        } else {
+          this.set('isThresh2Error', true);
+          this.set('errorMessage2', 'This is required');
+        }
+
+        // update the slider handles and color
+        if (this.get('isThresh1Error') === false && this.get('isThresh2Error') === false) {
+          $("#slider-range").slider('values', 0 , parseFloat(thresh1));
+          $("#slider-range").slider('values', 1 , parseFloat(thresh2));
+        }
+      }.observes('thresh1', 'thresh2')
+
+    });
+
+    var browserVerion = this.getInternetExplorerVersion();
+    App.ModalPopup.show({
+      header: Em.I18n.t('dashboard.widgets.popupHeader'),
+      classNames: [ 'sixty-percent-width-modal-edit-widget'],
+      bodyClass: Ember.View.extend({
+        templateName: require('templates/main/dashboard/edit_widget_popup'),
+        configPropertyObj: configObj
+      }),
+      primary: Em.I18n.t('common.apply'),
+      onPrimary: function () {
+        configObj.observeNewThresholdValue();
+        if (!configObj.isThresh1Error && !configObj.isThresh2Error) {
+          parent.set('thresh1', parseFloat(configObj.get('thresh1')) );
+          parent.set('thresh2', parseFloat(configObj.get('thresh2')) );
+          if (!App.get('testMode')) {
+            var big_parent = parent.get('parentView');
+            big_parent.getUserPref(big_parent.get('persistKey'));
+            var oldValue = big_parent.get('currentPrefObject');
+            oldValue.threshold[parseInt(parent.id)] = [configObj.get('thresh1'), configObj.get('thresh2')];
+            big_parent.postUserPref(big_parent.get('persistKey'),oldValue);
+          }
+          this.hide();
+        }
+      },
+
+      didInsertElement: function () {
+        var handlers = [configObj.get('thresh1'), configObj.get('thresh2')];
+        var colors = [App.healthStatusRed, App.healthStatusOrange, App.healthStatusGreen]; //color red, orange, green
+
+        if (browserVerion == -1 || browserVerion > 9) {
+          configObj.set('isIE9', false);
+          configObj.set('isGreenOrangeRed', false);
+          $("#slider-range").slider({
+            range: true,
+            min: 0,
+            max: max_tmp,
+            values: handlers,
+            create: function (event, ui) {
+              parent.updateColors(handlers, colors);
+            },
+            slide: function (event, ui) {
+              parent.updateColors(ui.values, colors);
+              configObj.set('thresh1', ui.values[0] + '');
+              configObj.set('thresh2', ui.values[1] + '');
+            },
+            change: function (event, ui) {
+              parent.updateColors(ui.values, colors);
+            }
+          });
+        } else {
+          configObj.set('isIE9', true);
+          configObj.set('isGreenOrangeRed', false);
+        }
+      }
+    });
+  }
+});


[49/50] [abbrv] ambari git commit: AMBARI-15147. Patch Upgrade: integrate API to POST new repo version when registering new version.(xiwang)

Posted by nc...@apache.org.
AMBARI-15147. Patch Upgrade: integrate API to POST new repo version when registering new version.(xiwang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a5b673dd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a5b673dd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a5b673dd

Branch: refs/heads/trunk
Commit: a5b673dd18b5849792aace38f8e377f57f45b610
Parents: e5d580f
Author: Xi Wang <xi...@apache.org>
Authored: Thu Feb 18 11:27:10 2016 -0800
Committer: Xi Wang <xi...@apache.org>
Committed: Tue Feb 23 16:05:23 2016 -0800

----------------------------------------------------------------------
 .../stackVersions/StackVersionsCreateCtrl.js    | 175 +++++++++++++------
 .../ui/admin-web/app/scripts/i18n.config.js     |   1 +
 .../ui/admin-web/app/scripts/services/Stack.js  | 167 +++---------------
 .../views/stackVersions/stackVersionPage.html   |   8 +-
 .../app/mappers/repository_version_mapper.js    | 152 ++++++++++------
 ambari-web/app/styles/stack_versions.less       |  10 +-
 .../admin/stack_upgrade/upgrade_version_box.hbs |  10 +-
 .../stack_upgrade/upgrade_version_box_view.js   |   3 +
 .../upgrade_version_column_view.js              |   3 +
 9 files changed, 275 insertions(+), 254 deletions(-)
----------------------------------------------------------------------
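
For readers following the diff below: this change drops the hard-coded getLatestRepo mock and instead POSTs a version definition to the new /version_definitions endpoint, either as a URL reference or as the raw VDF file contents. A minimal sketch of the two request shapes, assuming only what the controller and Stack service hunks in this commit show ($http and Settings.baseUrl are used as in Stack.js; the concrete URL and XML body are illustrative, not taken from the commit):

  // "Enter URL" option: JSON wrapper, sent with Angular's default JSON content type
  var byUrl = {
    "VersionDefinition": {
      "version_url": "https://example.org/HDP-2.3.6.0-3509.xml"  // illustrative URL
    }
  };
  $http.post(Settings.baseUrl + '/version_definitions', byUrl);

  // "Upload Version Definition File" option: file contents read via FileReader, posted as text/xml
  var vdfXml = '<repository-version> ... </repository-version>';  // placeholder for the selected file's contents
  $http.post(Settings.baseUrl + '/version_definitions', vdfXml, { headers: {'Content-Type': 'text/xml'} });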


http://git-wip-us.apache.org/repos/asf/ambari/blob/a5b673dd/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
index 190670a..cf0ca2a 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
@@ -36,7 +36,7 @@ angular.module('ambariAdminConsole')
   $scope.option1 = {
     index: 1,
     displayName: 'Upload Version Definition File',
-    url: 'files://',
+    file: null,
     hasError: false
   };
   $scope.option2 = {
@@ -45,7 +45,9 @@ angular.module('ambariAdminConsole')
     url: 'https://',
     hasError: false
   };
-  $scope.selectedOption = 1;
+  $scope.selectedOption = {
+    index: 1
+  };
 
   /**
    * User can select ONLY one option to upload version definition file
@@ -59,64 +61,78 @@ angular.module('ambariAdminConsole')
     $scope.option2.hasError = false;
   };
   $scope.readInfoButtonDisabled = function () {
-    return $scope.option1.selected ? !$scope.option1.url : !$scope.option2.url;
+    return $scope.option1.index == $scope.selectedOption.index ? !$scope.option1.file : !$scope.option2.url;
   };
 
-  $scope.onFileSelect = function(){
-    return {
-      link: function($scope,el){
-        el.bind("change", function(e){
-          $scope.file = (e.srcElement || e.target).files[0];
-          $scope.getFile();
-        })
-      }
+  $scope.onFileSelect = function(e){
+    if (e.files && e.files.length == 1) {
+      var file = e.files[0];
+      var reader = new FileReader();
+      reader.onload = (function () {
+        return function (e) {
+          $scope.option1.file = e.target.result;
+        };
+      })(file);
+      reader.readAsText(file);
     }
   };
 
-//  $scope.uploadFile = function(){
-//    var file = $scope.myFile;
-//    console.log('file is ' );
-//    console.dir(file);
-//    var uploadUrl = "/fileUpload";
-//    fileUpload.uploadFileToUrl(file, uploadUrl);
-//  };
-
   /**
    * Load selected file to current page content
    */
   $scope.readVersionInfo = function(){
-    if ($scope.option2.selected) {
+    var data = {};
+    var isXMLdata = false;
+    if ($scope.option2.index == $scope.selectedOption.index) {
       var url = $scope.option2.url;
-    }
-    /// POST url first then get the version definition info
-    return Stack.getLatestRepo('HDP').then(function (response) {
-      $scope.id = response.id;
-      $scope.isPatch = response.type == 'PATCH';
-      $scope.stackNameVersion = response.stackNameVersion || 'n/a';
-      $scope.displayName = response.displayName || 'n/a';
-      $scope.version = response.version || 'n/a';
-      $scope.actualVersion = response.actualVersion || 'n/a';
-      $scope.upgradeStack = {
-        stack_name: response.stackName,
-        stack_version: response.stackVersion,
-        display_name: response.displayName
+      data = {
+        "VersionDefinition": {
+          "version_url": url
+        }
       };
-      $scope.services = response.services || [];
-      //save default values of repos to check if they were changed
-      $scope.defaulfOSRepos = {};
-      response.updateObj.operating_systems.forEach(function(os) {
-        $scope.defaulfOSRepos[os.OperatingSystems.os_type] = {
-          defaultBaseUrl: os.repositories[0].Repositories.base_url,
-          defaultUtilsUrl: os.repositories[1].Repositories.base_url
-        };
-      });
-      $scope.repoVersionFullName = response.repoVersionFullName;
-      angular.forEach(response.osList, function (os) {
-        os.selected = true;
-      });
-      $scope.osList = response.osList;
-      // load supported os type base on stack version
-      $scope.afterStackVersionRead();
+    } else if ($scope.option1.index == $scope.selectedOption.index) {
+      isXMLdata = true;
+      // load from file browser
+      data = $scope.option1.file;
+    }
+
+    return Stack.postVersionDefinitionFile(isXMLdata, data).then(function (versionInfo) {
+      if (versionInfo.id && versionInfo.stackName && versionInfo.stackVersion) {
+        Stack.getRepo(versionInfo.id, versionInfo.stackName, versionInfo.stackVersion)
+          .then(function (response) {
+            $scope.id = response.id;
+            $scope.isPatch = response.type == 'PATCH';
+            $scope.stackNameVersion = response.stackNameVersion || 'n/a';
+            $scope.displayName = response.displayName || 'n/a';
+            $scope.version = response.version || 'n/a';
+            $scope.actualVersion = response.actualVersion || 'n/a';
+            $scope.updateObj = response.updateObj;
+            $scope.upgradeStack = {
+              stack_name: response.stackName,
+              stack_version: response.stackVersion,
+              display_name: response.displayName
+            };
+            $scope.services = response.services || [];
+            //save default values of repos to check if they were changed
+            $scope.defaulfOSRepos = {};
+            response.updateObj.operating_systems.forEach(function(os) {
+              $scope.defaulfOSRepos[os.OperatingSystems.os_type] = {
+                defaultBaseUrl: os.repositories[0].Repositories.base_url,
+                defaultUtilsUrl: os.repositories[1].Repositories.base_url
+              };
+            });
+            $scope.repoVersionFullName = response.repoVersionFullName;
+            angular.forEach(response.osList, function (os) {
+              os.selected = true;
+            });
+            $scope.osList = response.osList;
+            // load supported os type base on stack version
+            $scope.afterStackVersionRead();
+        });
+      }
+    })
+    .catch(function (data) {
+      Alert.error($t('versions.alerts.readVersionInfoError'), data.message);
     });
   };
 
@@ -180,18 +196,41 @@ angular.module('ambariAdminConsole')
     return !enabled;
   }
 
+  $scope.defaulfOSRepos = {};
+
   $scope.save = function () {
+    $scope.editVersionDisabled = true;
+    delete $scope.updateObj.href;
+    $scope.updateObj.operating_systems = [];
+    var updateRepoUrl = false;
+    angular.forEach($scope.osList, function (os) {
+      var savedUrls = $scope.defaulfOSRepos[os.OperatingSystems.os_type];
+      if (os.selected) {
+        var currentRepos = os.repositories;
+        if (!savedUrls || currentRepos[0].Repositories.base_url != savedUrls.defaultBaseUrl
+          || currentRepos[1].Repositories.base_url != savedUrls.defaultUtilsUrl) {
+          updateRepoUrl = true;
+        }
+        $scope.updateObj.operating_systems.push(os);
+      } else if (savedUrls) {
+        updateRepoUrl = true;
+      }
+    });
+    $scope.updateRepoVersions();
+  };
+
+  $scope.updateRepoVersions = function () {
     return Stack.validateBaseUrls($scope.skipValidation, $scope.osList, $scope.upgradeStack).then(function (invalidUrls) {
       if (invalidUrls.length === 0) {
-        Stack.addRepo($scope.upgradeStack, $scope.actualVersion, $scope.osList)
-          .success(function () {
-            var versionName = $scope.upgradeStack.selected.stack_version + '.' + $scope.repoSubversion;
-            var stackName = $scope.upgradeStack.selected.stack_name;
-            Alert.success($t('versions.alerts.versionCreated', {stackName: stackName, versionName: versionName}));
-            $location.path('/stackVersions');
-          })
-          .error(function (data) {
-              Alert.error($t('versions.alerts.versionCreationError'), data.message);
+        Stack.updateRepo($scope.upgradeStack.stack_name, $scope.upgradeStack.stack_version, $scope.id, $scope.updateObj).then(function () {
+          Alert.success($t('versions.alerts.versionEdited', {
+            stackName: $scope.upgradeStack.stack_name,
+            versionName: $scope.actualVersion,
+            displayName: $scope.repoVersionFullName
+          }));
+          $location.path('/stackVersions');
+        }).catch(function (data) {
+            Alert.error($t('versions.alerts.versionUpdateError'), data.message);
           });
       } else {
         Stack.highlightInvalidUrls(invalidUrls);
@@ -199,6 +238,26 @@ angular.module('ambariAdminConsole')
     });
   };
 
+//
+//  $scope.save = function () {
+//    return Stack.validateBaseUrls($scope.skipValidation, $scope.osList, $scope.upgradeStack).then(function (invalidUrls) {
+//      if (invalidUrls.length === 0) {
+//        Stack.addRepo($scope.upgradeStack, $scope.actualVersion, $scope.osList)
+//          .success(function () {
+//            var versionName = $scope.upgradeStack.selected.stack_version + '.' + $scope.repoSubversion;
+//            var stackName = $scope.upgradeStack.selected.stack_name;
+//            Alert.success($t('versions.alerts.versionCreated', {stackName: stackName, versionName: versionName}));
+//            $location.path('/stackVersions');
+//          })
+//          .error(function (data) {
+//              Alert.error($t('versions.alerts.versionCreationError'), data.message);
+//          });
+//      } else {
+//        Stack.highlightInvalidUrls(invalidUrls);
+//      }
+//    });
+//  };
+
   $scope.cancel = function () {
     $scope.editVersionDisabled = true;
     $location.path('/stackVersions');

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5b673dd/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
index 0dc10c1..ce0ff41 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
@@ -342,6 +342,7 @@ angular.module('ambariAdminConsole')
         'versionCreated': 'Created version <a href="#/stackVersions/{{stackName}}/{{versionName}}/edit">{{stackName}}-{{versionName}}</a>',
         'versionCreationError': 'Version creation error',
         'osListError': 'getSupportedOSList error',
+        'readVersionInfoError': 'Version Definition read error',
         'versionEdited': 'Edited version <a href="#/stackVersions/{{stackName}}/{{versionName}}/edit">{{displayName}}</a>',
         'versionUpdateError': 'Version update error',
         'versionDeleteError': 'Version delete error'

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5b673dd/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
index ae67c7e..4ba0fc1 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
@@ -145,11 +145,21 @@ angular.module('ambariAdminConsole')
       return $http.post(Settings.baseUrl + url, payloadWrap);
     },
 
-    getRepo: function (repoVersion, stack_name) {
-      var url = Settings.baseUrl + '/stacks/' + stack_name + '/versions?' +
-      'fields=repository_versions/operating_systems/repositories/*' +
-      ',repository_versions/RepositoryVersions/display_name' +
-      '&repository_versions/RepositoryVersions/repository_version=' + repoVersion;
+    getRepo: function (repoVersion, stack_name, stack_version) {
+      if (stack_version) {
+        // get repo by stack version(2.3) and id (112)
+        var url = Settings.baseUrl + '/stacks/' + stack_name + '/versions?' +
+          'fields=repository_versions/operating_systems/repositories/*' +
+          ',repository_versions/RepositoryVersions/*' +
+          '&repository_versions/RepositoryVersions/id=' + repoVersion +
+          '&Versions/stack_version=' + stack_version;
+      } else {
+        // get repo by repoVersion (2.3.6.0-2345)
+        var url = Settings.baseUrl + '/stacks/' + stack_name + '/versions?' +
+          'fields=repository_versions/operating_systems/repositories/*' +
+          ',repository_versions/RepositoryVersions/*' +
+          '&repository_versions/RepositoryVersions/repository_version=' + repoVersion;
+      }
       var deferred = $q.defer();
       $http.get(url, {mock: 'version/version.json'})
       .success(function (data) {
@@ -186,140 +196,21 @@ angular.module('ambariAdminConsole')
       return deferred.promise;
     },
 
-    getLatestRepo: function (stack_name) {
-      var url = Settings.baseUrl + '/stacks/' + stack_name + '/versions?' +
-        'fields=repository_versions/operating_systems/repositories/*' +
-        ',repository_versions/RepositoryVersions/*';  // tbd
-      var deferred = $q.defer();
-      $http.get(url, {mock: 'version/version.json'})
-        .success(function (data) {
-          //data = data.items[0];
-          data = {
-            "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.3",
-            "Versions" : {
-              "stack_name" : "HDP",
-              "stack_version" : "2.3"
-            },
-            "repository_versions" : [
-              {
-                "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.3/repository_versions/15",
-                "RepositoryVersions" : {
-                  "id" : 15,
-                  "repository_version" : "2.3.6.0-3509",
-                  "stack_name" : "HDP",
-                  "stack_version" : "2.3",
-                  "type": "PATCH",
-                  "release": {
-                    "stack_id": "HDP-2.3",
-                    "version": "2.3.6.0",
-                    "build": "3509",
-                    "compatible_with": "2.3.6.0-[1-9]",
-                    "release_notes": "http://someurl"
-                  },
-                  "services": [
-                    {
-                      "name": "HDFS",
-                      "display_name": "HDFS",
-                      "versions": [
-                        {
-                          "version": "2.1.1",
-                          "version_id": "10",
-                          "components": [ "NAMENODE"]
-                        }
-                      ]
-                    },
-                    {
-                      "name": "HIVE",
-                      "display_name": "Hive",
-                      "versions": [
-                        {
-                          "version": "1.2.1"
-                        }
-                      ]
-                    },
-                    {
-                      "name": "ZOOKEEPER",
-                      "display_name": "ZooKeeper",
-                      "versions": [
-                        {
-                          "version": "3.4.5"
-                        }
-                      ]
-                    }
-                  ]
-                },
-                "operating_systems" : [
-                  {
-                    "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.3/repository_versions/15/operating_systems/redhat6",
-                    "OperatingSystems" : {
-                      "os_type" : "redhat6",
-                      "repository_version_id" : 15,
-                      "stack_name" : "HDP",
-                      "stack_version" : "2.3"
-                    },
-                    "repositories" : [
-                      {
-                        "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.3/repository_versions/15/operating_systems/redhat6/repositories/HDP-2.3.6.0-3509",
-                        "Repositories" : {
-                          "base_url" : "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.3.6.0-3509",
-                          "default_base_url" : "",
-                          "latest_base_url" : "",
-                          "mirrors_list" : "",
-                          "os_type" : "redhat6",
-                          "repo_id" : "HDP-2.3.6.0-3509",
-                          "repo_name" : "HDP",
-                          "repository_version_id" : 15,
-                          "stack_name" : "HDP",
-                          "stack_version" : "2.3"
-                        }
-                      },
-                      {
-                        "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.3/repository_versions/15/operating_systems/redhat6/repositories/HDP-UTILS-2.3.6.0-3509",
-                        "Repositories" : {
-                          "base_url" : "http://s3.amazonaws.com/dev.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6",
-                          "default_base_url" : "",
-                          "latest_base_url" : "",
-                          "mirrors_list" : "",
-                          "os_type" : "redhat6",
-                          "repo_id" : "HDP-UTILS-2.3.6.0-3509",
-                          "repo_name" : "HDP-UTILS",
-                          "repository_version_id" : 15,
-                          "stack_name" : "HDP",
-                          "stack_version" : "2.3"
-                        }
-                      }
-                    ]
-                  }
-                ]
-              }
-            ]
-          };
+    postVersionDefinitionFile: function (isXMLdata, data) {
+      var deferred = $q.defer(),
+        url = Settings.baseUrl + '/version_definitions',
+        configs = isXMLdata? { headers: {'Content-Type': 'text/xml'}} : null;
 
-          var response = {
-            id : data.repository_versions[0].RepositoryVersions.id,
-            stackVersion : data.Versions.stack_version,
-            stackName: data.Versions.stack_name,
-            type: data.repository_versions[0].RepositoryVersions.type,
-            stackNameVersion: data.Versions.stack_name + '-' + data.Versions.stack_version, /// HDP-2.3
-            actualVersion: data.repository_versions[0].RepositoryVersions.repository_version, /// 2.3.4.0-3846
-            version: data.repository_versions[0].RepositoryVersions.release ? data.repository_versions[0].RepositoryVersions.release.version: null, /// 2.3.4.0
-            releaseNotes: data.repository_versions[0].RepositoryVersions.release ? data.repository_versions[0].RepositoryVersions.release.release_notes: null,
-            displayName: data.repository_versions[0].RepositoryVersions.release ? data.Versions.stack_name + '-' + data.repository_versions[0].RepositoryVersions.release.version :
-              data.Versions.stack_name + '-' + data.repository_versions[0].RepositoryVersions.repository_version.split('-')[0], //HDP-2.3.4.0
-            repoVersionFullName : data.Versions.stack_name + '-' + data.repository_versions[0].RepositoryVersions.repository_version,
-            osList: data.repository_versions[0].operating_systems,
-            updateObj: data.repository_versions[0]
-          };
-          var services = [];
-          angular.forEach(data.repository_versions[0].RepositoryVersions.services, function (service) {
-            services.push({
-              name: service.display_name,
-              version: service.versions[0].version,
-              components: service.versions[0].components
-            });
-          });
-          response.services = services;
-          deferred.resolve(response);
+      $http.post(url, data, configs)
+        .success(function (response) {
+          if (response.resources.length && response.resources[0].VersionDefinition) {
+            deferred.resolve(
+              {
+                stackName: response.resources[0].VersionDefinition.stack_name,
+                id: response.resources[0].VersionDefinition.id,
+                stackVersion: response.resources[0].VersionDefinition.stack_version
+              });
+          }
         })
         .error(function (data) {
           deferred.reject(data);
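
A hedged sketch of how the controller wires together the two Stack methods changed above (it mirrors readVersionInfo in StackVersionsCreateCtrl.js from this commit; error handling is trimmed and the sample values in the comments are illustrative):

  // 1. Register the definition: either raw VDF XML or a {"VersionDefinition": {"version_url": ...}} wrapper.
  Stack.postVersionDefinitionFile(isXMLdata, data)
    .then(function (versionInfo) {
      // 2. Re-read the repository_version the POST just created, now addressed by
      //    numeric id plus stack name/version (e.g. 15, 'HDP', '2.3').
      return Stack.getRepo(versionInfo.id, versionInfo.stackName, versionInfo.stackVersion);
    })
    .then(function (response) {
      // 3. Populate the register/edit form from the parsed definition.
      $scope.repoVersionFullName = response.repoVersionFullName;
      $scope.osList = response.osList;
      $scope.services = response.services || [];
    });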

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5b673dd/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
index 112ded8..b8d51b5 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
@@ -36,22 +36,22 @@
   <div class="clearfix register-version-options">
     <div class="col-sm-5 option-radio-button">
       <label class="option-label">
-        <input type="radio" ng-model="selectedOption" value="{{option1.index}}" ng-change="toggleOptionSelect()"> {{'versions.uploadFile' | translate}}
+        <input type="radio" ng-model="selectedOption.index" value="1" ng-change="toggleOptionSelect()"> {{'versions.uploadFile' | translate}}
       </label>
     </div>
     <div class="col-sm-7">
-      <input type="file" class="choose-file-input" ng-file-select="onFileSelect($files)"/>
+      <input type="file" class="choose-file-input" onchange="angular.element(this).scope().onFileSelect(this)"/>
     </div>
   </div>
   <div class="clearfix register-version-options border-bottom bottom-margin">
     <div class="col-sm-5 option-radio-button">
       <label class="option-label">
-        <input type="radio" ng-model="selectedOption" value="{{option2.index}}" ng-change="toggleOptionSelect()"> {{'versions.enterURL' | translate}}
+        <input type="radio" ng-model="selectedOption.index" value="2" ng-change="toggleOptionSelect()"> {{'versions.enterURL' | translate}}
       </label>
     </div>
     <div class="col-sm-7">
       <div class="form-group {{option2.name}}" ng-class="{'has-error': option2.url.hasError }">
-        <div class=""><input type="text" class="form-control" ng-model="option2.url" ng-change="clearOptionsError()" ng-disabled="!(selectedOption==2)"></div>
+        <div class=""><input type="text" class="form-control" ng-model="option2.url" ng-change="clearOptionsError()" ng-disabled="!(selectedOption.index==2)"></div>
       </div>
     </div>
     <div class="col-sm-12 read-info-button">

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5b673dd/ambari-web/app/mappers/repository_version_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/repository_version_mapper.js b/ambari-web/app/mappers/repository_version_mapper.js
index c937678..be85cd2 100644
--- a/ambari-web/app/mappers/repository_version_mapper.js
+++ b/ambari-web/app/mappers/repository_version_mapper.js
@@ -97,7 +97,6 @@ App.repoVersionMapper = App.QuickDataMapper.create({
     if (json && json.items) {
       json.items.forEach(function (item) {
         ////////////// to test//////////////
-        item[repoVersionsKey].type = "PATCH";
         item[repoVersionsKey].release = {
           "stack_id": "HDP-2.2",
           "version": "2.2.4.1",
@@ -105,55 +104,108 @@ App.repoVersionMapper = App.QuickDataMapper.create({
           "compatible_with": "", /* regex */
           "release_notes": "http://someurl"
         };
-        item[repoVersionsKey].services = [
-          {
-            "name": "HDFS",
-            "display_name": "HDFS",
-            "versions": [
-              {
-                "version": "2.1.1",
-                "version_id": "10",
-                "components": [ "NAMENODE"]
-              }
-            ]
-          },
-          {
-            "name": "YARN",
-            "display_name": "YARN",
-            "versions": [
-              {
-                "version": "2.7.1.2.3"
-              }
-            ]
-          },
-          {
-            "name": "ZOOKEEPER",
-            "display_name": "ZooKeeper",
-            "versions": [
-              {
-                "version": "3.4.6.2.3"
-              }
-            ]
-          },
-          {
-            "name": "wrong",
-            "display_name": "MapReduce2",
-            "versions": [
-              {
-                "version": "2.7.1.2.3"
-              }
-            ]
-          },
-          {
-            "name": "AMBARI_METRICS",
-            "display_name": "Ambari Metrics",
-            "versions": [
-              {
-                "version": "0.1.0"
-              }
-            ]
-          }
-        ];
+        if (item[repoVersionsKey].id  % 2 ) {
+          item[repoVersionsKey].type = "PATCH";
+          item[repoVersionsKey].services = [
+            {
+              "name": "HDFS",
+              "display_name": "HDFS",
+              "versions": [
+                {
+                  "version": "2.1.1",
+                  "version_id": "10",
+                  "components": [ "NAMENODE"]
+                }
+              ]
+            },
+            {
+              "name": "YARN",
+              "display_name": "YARN",
+              "versions": [
+                {
+                  "version": "2.7.1.2.3"
+                }
+              ]
+            },
+            {
+              "name": "ZOOKEEPER",
+              "display_name": "ZooKeeper",
+              "versions": [
+                {
+                  "version": "3.4.6.2.3"
+                }
+              ]
+            },
+            {
+              "name": "wrong",
+              "display_name": "MapReduce2",
+              "versions": [
+                {
+                  "version": "2.7.1.2.3"
+                }
+              ]
+            },
+            {
+              "name": "AMBARI_METRICS",
+              "display_name": "Ambari Metrics",
+              "versions": [
+                {
+                  "version": "0.1.0"
+                }
+              ]
+            }
+          ];
+        } else {
+          item[repoVersionsKey].services = [
+            {
+              "name": "HDFS",
+              "display_name": "HDFS",
+              "versions": [
+                {
+                  "version": "2.1.1",
+                  "version_id": "10",
+                  "components": [ "NAMENODE"]
+                }
+              ]
+            },
+            {
+              "name": "wrong",
+              "display_name": "YARN",
+              "versions": [
+                {
+                  "version": "2.7.1.2.3"
+                }
+              ]
+            },
+            {
+              "name": "wrong",
+              "display_name": "ZooKeeper",
+              "versions": [
+                {
+                  "version": "3.4.6.2.3"
+                }
+              ]
+            },
+            {
+              "name": "MAPREDUCE2",
+              "display_name": "MapReduce2",
+              "versions": [
+                {
+                  "version": "2.7.1.2.3"
+                }
+              ]
+            },
+            {
+              "name": "AMBARI_METRICS",
+              "display_name": "Ambari Metrics",
+              "versions": [
+                {
+                  "version": "0.1.0"
+                }
+              ]
+            }
+          ];
+        }
         //////////////////////////////
 
         if (loadAll || (item[repoVersionsKey] && !App.StackVersion.find().someProperty('repositoryVersion.id', item[repoVersionsKey].id))) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5b673dd/ambari-web/app/styles/stack_versions.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/stack_versions.less b/ambari-web/app/styles/stack_versions.less
index 5490592..25f474e 100644
--- a/ambari-web/app/styles/stack_versions.less
+++ b/ambari-web/app/styles/stack_versions.less
@@ -172,7 +172,7 @@
     position: relative;
     background: none repeat scroll 0 0 #fff;
     border: 1px solid #d2d9dd;
-    height: 168px;
+    height: 190px;
     margin: 15px 15px 0 0;
     padding: 5px 0;
     a.not-active:hover {
@@ -245,6 +245,14 @@
       margin: 0 0px -18px 1px;
       padding: 1px;
     }
+    .patch-icon {
+      text-align: center;
+      color: #ff4500;
+      height: 20px;
+      font-size: 14px;
+      margin: 18px 0px -18px 1px;
+      padding: 1px;
+    }
     .hosts-section {
       margin-top: 16px;
       width: 100%;

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5b673dd/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_box.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_box.hbs b/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_box.hbs
index 84d7835..dcc6944 100644
--- a/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_box.hbs
+++ b/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_box.hbs
@@ -27,9 +27,13 @@
   </a>
 </p>
 <p class="repository-name">({{view.content.repositoryVersion}})</p>
-{{#if view.isPatch}}
-  <p class="patch-icon"></p>
-{{/if}}
+
+<p class="patch-icon">
+  {{#if view.isPatch}}
+    <i class="icon-umbrella"></i>&nbsp;{{t common.patch}}
+  {{/if}}
+</p>
+
 
 <div {{bindAttr class="view.stateElement.isInstalling:installing :align-center :state"}}>
   {{#if view.stateElement.isButton}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5b673dd/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
index 1ea0c66..e0c0ae2 100644
--- a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
+++ b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
@@ -397,6 +397,9 @@ App.UpgradeVersionBoxView = Em.View.extend({
         secondary: Em.I18n.t('common.close'),
         onPrimary: function () {
           this.hide();
+          if ($('.version-box-popup .modal')) {
+            $('.version-box-popup .modal .modal-footer .btn-success').click();
+          }
           self.filterHostsByStack(displayName, status.id);
         }
       });

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5b673dd/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js
index d7e26e2..346c10c 100644
--- a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js
+++ b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js
@@ -86,6 +86,9 @@ App.UpgradeVersionColumnView = App.UpgradeVersionBoxView.extend({
     }
   },
 
+  /**
+   * on click handler for "show details" link
+   */
   openVersionBoxPopup: function (event) {
     var content = this.get('content');
     var parentView = this.get('parentView');


[36/50] [abbrv] ambari git commit: Merge with trunk

Posted by nc...@apache.org.
Merge with trunk


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/dea22be1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/dea22be1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/dea22be1

Branch: refs/heads/trunk
Commit: dea22be16c172ed0ca7a6e66ac29bda04027ca2f
Parents: 4f78af7
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Mon Feb 22 17:19:56 2016 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Mon Feb 22 17:19:56 2016 +0200

----------------------------------------------------------------------
 .../python/resource_management/core/logger.py   |   2 +-
 .../timeline/AbstractTimelineMetricsSink.java   |  53 +++++-
 .../ApplicationHistoryServer.java               |   6 +-
 .../loadsimulator/net/RestMetricsSender.java    |   4 +-
 .../ApplicationHistoryStoreTestUtils.java       |   2 +-
 .../TestApplicationHistoryClientService.java    |   6 +-
 .../TestFileSystemApplicationHistoryStore.java  |  12 +-
 .../TestMemoryApplicationHistoryStore.java      |  12 +-
 .../webapp/TestAHSWebServices.java              |   2 +-
 .../server/configuration/Configuration.java     |  89 +++++++++
 .../server/controller/ControllerModule.java     |  16 ++
 .../controller/ServiceComponentRequest.java     |  35 +++-
 .../controller/ServiceComponentResponse.java    |  22 ++-
 .../internal/ComponentResourceProvider.java     |  38 ++++
 .../server/orm/dao/HostRoleCommandDAO.java      | 166 +++++++++++++---
 .../orm/entities/HostRoleCommandEntity.java     |   7 +-
 .../ServiceComponentDesiredStateEntity.java     |  11 ++
 .../serveraction/ServerActionExecutor.java      |  13 +-
 .../ambari/server/state/ServiceComponent.java   |  14 ++
 .../server/state/ServiceComponentImpl.java      |  80 +++++++-
 .../server/state/cluster/ClusterImpl.java       |  36 ++--
 .../services/AlertNoticeDispatchService.java    |  17 +-
 .../server/upgrade/UpgradeCatalog222.java       |   5 +
 .../server/upgrade/UpgradeCatalog240.java       |  14 ++
 .../main/resources/Ambari-DDL-Derby-CREATE.sql  |   2 +
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   3 +
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |   3 +
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |   5 +
 .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql     |   3 +
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |   3 +
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   |   5 +-
 .../src/main/resources/alert-templates.xml      |  20 +-
 .../0.8.1.2.2/configuration/kafka-broker.xml    |   1 -
 .../src/main/resources/properties.json          |   1 +
 .../main/resources/scripts/Ambaripreupload.py   |  41 ++--
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |   5 +-
 .../stacks/HDP/2.2/services/stack_advisor.py    |  17 +-
 .../stacks/HDP/2.3/services/stack_advisor.py    |  32 ++--
 .../stacks/HDP/2.4/services/HIVE/metainfo.xml   |  41 +++-
 .../actionmanager/TestActionScheduler.java      |  24 +--
 .../ambari/server/agent/AgentResourceTest.java  |   2 +
 .../server/configuration/ConfigurationTest.java |  95 ++++++++++
 .../AmbariManagementControllerTest.java         |   3 +
 .../server/controller/KerberosHelperTest.java   |   2 +
 .../internal/ComponentResourceProviderTest.java |  37 ++--
 .../ambari/server/stack/StackManagerTest.java   |  35 ++--
 .../ambari/server/state/ConfigHelperTest.java   |   2 +
 .../server/upgrade/UpgradeCatalog222Test.java   |  55 +++++-
 .../server/upgrade/UpgradeCatalog240Test.java   |  12 ++
 .../ambari/server/utils/StageUtilsTest.java     |   2 +
 .../stacks/2.0.6/common/test_stack_advisor.py   |  12 +-
 .../stacks/2.2/common/test_stack_advisor.py     |  14 +-
 .../stacks/2.3/common/test_stack_advisor.py     |  46 +++++
 ambari-web/app/assets/test/tests.js             |   1 +
 .../hawq/addStandby/step3_controller.js         |   2 +-
 .../app/mappers/components_state_mapper.js      |   5 +
 ambari-web/app/messages.js                      |   5 +
 .../app/models/alerts/alert_definition.js       |   4 +-
 ambari-web/app/views.js                         |   1 +
 .../configs/widgets/list_config_widget_view.js  |  11 +-
 ambari-web/app/views/main/dashboard/widgets.js  |  19 +-
 .../main/dashboard/widgets/hawqsegment_live.js  | 190 +++++++++++++++++++
 .../dashboard/widgets/hawqsegment_live_test.js  |  69 +++++++
 63 files changed, 1270 insertions(+), 222 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-common/src/main/python/resource_management/core/logger.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/logger.py b/ambari-common/src/main/python/resource_management/core/logger.py
index fd05b02..5bbd35b 100644
--- a/ambari-common/src/main/python/resource_management/core/logger.py
+++ b/ambari-common/src/main/python/resource_management/core/logger.py
@@ -173,4 +173,4 @@ class Logger:
     if arguments_str:
       arguments_str = arguments_str[:-2]
         
-    return unicode("{0} {{{1}}}").format(name, arguments_str)
\ No newline at end of file
+    return unicode("{0} {{{1}}}", 'UTF-8').format(name, arguments_str)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
index 2854898..b2810b7 100644
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
+++ b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
@@ -28,9 +28,12 @@ import javax.net.ssl.HttpsURLConnection;
 import javax.net.ssl.SSLContext;
 import javax.net.ssl.SSLSocketFactory;
 import javax.net.ssl.TrustManagerFactory;
+import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.net.HttpURLConnection;
 import java.net.URL;
@@ -72,17 +75,19 @@ public abstract class AbstractTimelineMetricsSink {
   protected void emitMetrics(TimelineMetrics metrics) {
     String connectUrl = getCollectorUri();
     int timeout = getTimeoutSeconds() * 1000;
+    HttpURLConnection connection = null;
     try {
       if (connectUrl == null) {
         throw new IOException("Unknown URL. " +
           "Unable to connect to metrics collector.");
       }
       String jsonData = mapper.writeValueAsString(metrics);
-      HttpURLConnection connection = connectUrl.startsWith("https") ?
+      connection = connectUrl.startsWith("https") ?
         getSSLConnection(connectUrl) : getConnection(connectUrl);
 
       connection.setRequestMethod("POST");
       connection.setRequestProperty("Content-Type", "application/json");
+      connection.setRequestProperty("Connection", "Keep-Alive");
       connection.setConnectTimeout(timeout);
       connection.setReadTimeout(timeout);
       connection.setDoOutput(true);
@@ -103,14 +108,52 @@ public abstract class AbstractTimelineMetricsSink {
           LOG.debug("Metrics posted to Collector " + connectUrl);
         }
       }
-    } catch (IOException e) {
+      cleanupInputStream(connection.getInputStream());
+    } catch (IOException ioe) {
+      StringBuilder errorMessage =
+        new StringBuilder("Unable to connect to collector, " + connectUrl + "\n");
+      try {
+        if ((connection != null)) {
+          errorMessage.append(cleanupInputStream(connection.getErrorStream()));
+        }
+      } catch (IOException e) {
+        //NOP
+      }
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Unable to connect to collector, " + connectUrl, e);
+        LOG.debug(errorMessage, ioe);
       } else {
-        LOG.info("Unable to connect to collector, " + connectUrl);
+        LOG.info(errorMessage);
+      }
+      throw new UnableToConnectException(ioe).setConnectUrl(connectUrl);
+    }
+  }
+
+  /**
+   * Cleans up and closes an input stream
+   * see http://docs.oracle.com/javase/6/docs/technotes/guides/net/http-keepalive.html
+   * @param is the InputStream to clean up
+   * @return string read from the InputStream
+   * @throws IOException
+   */
+  private String cleanupInputStream(InputStream is) throws IOException {
+    StringBuilder sb = new StringBuilder();
+    if (is != null) {
+      try (
+        InputStreamReader isr = new InputStreamReader(is);
+        BufferedReader br = new BufferedReader(isr)
+      ) {
+        // read the response body
+        String line;
+        while ((line = br.readLine()) != null) {
+          if (LOG.isDebugEnabled()) {
+            sb.append(line);
+          }
+        }
+      } finally {
+        is.close();
       }
-      throw new UnableToConnectException(e).setConnectUrl(connectUrl);
     }
+    return sb.toString();
   }
 
   // Get a connection

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
index 62a8cc3..1ca9c33 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
@@ -55,8 +55,8 @@ import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.ti
 public class ApplicationHistoryServer extends CompositeService {
 
   public static final int SHUTDOWN_HOOK_PRIORITY = 30;
-  private static final Log LOG = LogFactory
-    .getLog(ApplicationHistoryServer.class);
+  private static final Log LOG =
+    LogFactory.getLog(ApplicationHistoryServer.class);
 
   ApplicationHistoryClientService ahsClientService;
   ApplicationHistoryManager historyManager;
@@ -172,6 +172,8 @@ public class ApplicationHistoryServer extends CompositeService {
     LOG.info("Instantiating AHSWebApp at " + bindAddress);
     try {
       Configuration conf = metricConfiguration.getMetricsConf();
+      conf.set("hadoop.http.max.threads", String.valueOf(metricConfiguration
+        .getTimelineMetricsServiceHandlerThreadCount()));
       HttpConfig.Policy policy = HttpConfig.Policy.valueOf(
         conf.get(TimelineMetricConfiguration.TIMELINE_SERVICE_HTTP_POLICY,
           HttpConfig.Policy.HTTP_ONLY.name()));

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
index 0a9a513..32af851 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
@@ -24,6 +24,7 @@ import org.slf4j.LoggerFactory;
 import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.ProtocolException;
+import java.util.concurrent.TimeUnit;
 
 /**
  * Implements MetricsSender and provides a way of pushing metrics to application metrics history service using REST
@@ -65,7 +66,8 @@ public class RestMetricsSender implements MetricsSender {
       responseString = svc.send(payload);
 
       timer.stop();
-      LOG.info("http response time: " + timer.elapsedMillis() + " ms");
+      LOG.info("http response time: " + timer.elapsed(TimeUnit.MILLISECONDS)
+        + " ms");
 
       if (responseString.length() > 0) {
         LOG.debug("POST response from server: " + responseString);

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
index c41b8a7..ec9b49d 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
@@ -58,7 +58,7 @@ public class ApplicationHistoryStoreTestUtils {
       ApplicationAttemptId appAttemptId) throws IOException {
     store.applicationAttemptStarted(ApplicationAttemptStartData.newInstance(
       appAttemptId, appAttemptId.toString(), 0,
-      ContainerId.newInstance(appAttemptId, 1)));
+      ContainerId.newContainerId(appAttemptId, 1)));
   }
 
   protected void writeApplicationAttemptFinishData(

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
index 2fdedb2..f93ac5e 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
@@ -168,7 +168,7 @@ public class TestApplicationHistoryClientService extends
     writeApplicationStartData(appId);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
+    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
     writeContainerStartData(containerId);
     writeContainerFinishData(containerId);
     writeApplicationFinishData(appId);
@@ -189,8 +189,8 @@ public class TestApplicationHistoryClientService extends
     writeApplicationStartData(appId);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
-    ContainerId containerId1 = ContainerId.newInstance(appAttemptId, 2);
+    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
+    ContainerId containerId1 = ContainerId.newContainerId(appAttemptId, 2);
     writeContainerStartData(containerId);
     writeContainerFinishData(containerId);
     writeContainerStartData(containerId1);

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
index bc16d36..543c25b 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
@@ -94,7 +94,7 @@ public class TestFileSystemApplicationHistoryStore extends
         }
         // write container history data
         for (int k = 1; k <= num; ++k) {
-          ContainerId containerId = ContainerId.newInstance(appAttemptId, k);
+          ContainerId containerId = ContainerId.newContainerId(appAttemptId, k);
           writeContainerStartData(containerId);
           if (missingContainer && k == num) {
             continue;
@@ -144,7 +144,7 @@ public class TestFileSystemApplicationHistoryStore extends
         // read container history data
         Assert.assertEquals(num, store.getContainers(appAttemptId).size());
         for (int k = 1; k <= num; ++k) {
-          ContainerId containerId = ContainerId.newInstance(appAttemptId, k);
+          ContainerId containerId = ContainerId.newContainerId(appAttemptId, k);
           ContainerHistoryData containerData = store.getContainer(containerId);
           Assert.assertNotNull(containerData);
           Assert.assertEquals(Priority.newInstance(containerId.getId()),
@@ -159,7 +159,7 @@ public class TestFileSystemApplicationHistoryStore extends
         ContainerHistoryData masterContainer =
             store.getAMContainer(appAttemptId);
         Assert.assertNotNull(masterContainer);
-        Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1),
+        Assert.assertEquals(ContainerId.newContainerId(appAttemptId, 1),
           masterContainer.getContainerId());
       }
     }
@@ -186,7 +186,7 @@ public class TestFileSystemApplicationHistoryStore extends
       Assert.assertTrue(e.getMessage().contains("is not opened"));
     }
     // write container history data
-    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
+    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
     try {
       writeContainerStartData(containerId);
       Assert.fail();
@@ -209,8 +209,8 @@ public class TestFileSystemApplicationHistoryStore extends
     writeApplicationStartData(appId);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    for (int i = 1; i <= 100000; ++i) {
-      ContainerId containerId = ContainerId.newInstance(appAttemptId, i);
+    for (int i = 1; i <= 1000; ++i) {
+      ContainerId containerId = ContainerId.newContainerId(appAttemptId, i);
       writeContainerStartData(containerId);
       writeContainerFinishData(containerId);
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
index fc5c096..b4da01a 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
@@ -137,7 +137,7 @@ public class TestMemoryApplicationHistoryStore extends
     ApplicationId appId = ApplicationId.newInstance(0, 1);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
+    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
     try {
       writeContainerFinishData(containerId);
       Assert.fail();
@@ -149,14 +149,14 @@ public class TestMemoryApplicationHistoryStore extends
     writeApplicationAttemptStartData(appAttemptId);
     int numContainers = 5;
     for (int i = 1; i <= numContainers; ++i) {
-      containerId = ContainerId.newInstance(appAttemptId, i);
+      containerId = ContainerId.newContainerId(appAttemptId, i);
       writeContainerStartData(containerId);
       writeContainerFinishData(containerId);
     }
     Assert
       .assertEquals(numContainers, store.getContainers(appAttemptId).size());
     for (int i = 1; i <= numContainers; ++i) {
-      containerId = ContainerId.newInstance(appAttemptId, i);
+      containerId = ContainerId.newContainerId(appAttemptId, i);
       ContainerHistoryData data = store.getContainer(containerId);
       Assert.assertNotNull(data);
       Assert.assertEquals(Priority.newInstance(containerId.getId()),
@@ -165,11 +165,11 @@ public class TestMemoryApplicationHistoryStore extends
     }
     ContainerHistoryData masterContainer = store.getAMContainer(appAttemptId);
     Assert.assertNotNull(masterContainer);
-    Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1),
+    Assert.assertEquals(ContainerId.newContainerId(appAttemptId, 1),
       masterContainer.getContainerId());
     writeApplicationAttemptFinishData(appAttemptId);
     // Write again
-    containerId = ContainerId.newInstance(appAttemptId, 1);
+    containerId = ContainerId.newContainerId(appAttemptId, 1);
     try {
       writeContainerStartData(containerId);
       Assert.fail();
@@ -195,7 +195,7 @@ public class TestMemoryApplicationHistoryStore extends
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
     for (int i = 1; i <= numContainers; ++i) {
-      ContainerId containerId = ContainerId.newInstance(appAttemptId, i);
+      ContainerId containerId = ContainerId.newContainerId(appAttemptId, i);
       writeContainerStartData(containerId);
       writeContainerFinishData(containerId);
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
index e78dfcc..44b3f65 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
@@ -269,7 +269,7 @@ public class TestAHSWebServices extends JerseyTest {
     ApplicationId appId = ApplicationId.newInstance(0, 1);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
+    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
     WebResource r = resource();
     ClientResponse response =
         r.path("ws").path("v1").path("applicationhistory").path("apps")

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index eee4b61..221b83d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -184,6 +184,9 @@ public class Configuration {
   public static final String LDAP_REFERRAL_KEY = "authentication.ldap.referral";
   public static final String LDAP_PAGINATION_ENABLED_KEY = "authentication.ldap.pagination.enabled";
   public static final String SERVER_EC_CACHE_SIZE = "server.ecCacheSize";
+  public static final String SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED = "server.hrcStatusSummary.cache.enabled";
+  public static final String SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE = "server.hrcStatusSummary.cache.size";
+  public static final String SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION = "server.hrcStatusSummary.cache.expiryDuration";
   public static final String SERVER_STALE_CONFIG_CACHE_ENABLED_KEY = "server.cache.isStale.enabled";
   public static final String SERVER_PERSISTENCE_TYPE_KEY = "server.persistence.type";
   public static final String SERVER_JDBC_USER_NAME_KEY = "server.jdbc.user.name";
@@ -278,6 +281,9 @@ public class Configuration {
   public static final String TEMPORARY_KEYSTORE_ACTIVELY_PURGE = "security.temporary.keystore.actibely.purge";
   public static final boolean TEMPORARY_KEYSTORE_ACTIVELY_PURGE_DEFAULT = true;
 
+  // Alerts notifications properties
+  public static final String AMBARI_DISPLAY_URL = "ambari.display.url";
+
   /**
    * Key for repo validation suffixes.
    */
@@ -364,6 +370,11 @@ public class Configuration {
 
   public static final String CUSTOM_ACTION_DEFINITION_KEY = "custom.action.definitions";
   public static final String SHARED_RESOURCES_DIR_KEY = "shared.resources.dir";
+
+  protected static final boolean SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED_DEFAULT = true;
+  protected static final long SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE_DEFAULT = 10000L;
+  protected static final long SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_DEFAULT = 30; //minutes
+
   private static final String CUSTOM_ACTION_DEFINITION_DEF_VALUE = "/var/lib/ambari-server/resources/custom_action_definitions";
 
   private static final long SERVER_EC_CACHE_SIZE_DEFAULT = 10000L;
@@ -1774,6 +1785,75 @@ public class Configuration {
   }
 
   /**
+   * Caching of host role command status summary can be enabled/disabled
+   * through the {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED} config property.
+   * This method returns the value of the {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED}
+   * config property. If this config property is not defined then the default defined by {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED_DEFAULT} is returned.
+   * @return true if caching is enabled, otherwise false.
+   */
+  public boolean getHostRoleCommandStatusSummaryCacheEnabled() {
+    String stringValue = properties.getProperty(SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED);
+    boolean value = SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED_DEFAULT;
+    if (stringValue != null) {
+      try {
+        value = Boolean.valueOf(stringValue);
+      }
+      catch (NumberFormatException ignored) {
+      }
+
+    }
+
+    return value;
+  }
+
+  /**
+   * To keep the cache storing host role command status summary objects from exhausting
+   * memory, a maximum number of cached records is enforced. This limit can be configured
+   * through the {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE} config property. The method returns
+   * the value of this config property. If this config property is not defined then
+   * the default value specified by {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE_DEFAULT} is returned.
+   * @return the upper limit on the number of cached host role command summaries.
+   */
+  public long getHostRoleCommandStatusSummaryCacheSize() {
+    String stringValue = properties.getProperty(SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE);
+    long value = SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE_DEFAULT;
+    if (stringValue != null) {
+      try {
+        value = Long.valueOf(stringValue);
+      }
+      catch (NumberFormatException ignored) {
+      }
+
+    }
+
+    return value;
+  }
+
+  /**
+   * As a safety measure, the cache storing host role command status summaries auto expires after a while.
+   * The expiry duration is specified through the {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION} config property
+   * expressed in minutes. The method returns the value of this config property. If this config property is not defined then
+   * the default value specified by {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_DEFAULT} is returned.
+   * @return the cache expiry duration in minutes
+   */
+  public long getHostRoleCommandStatusSummaryCacheExpiryDuration() {
+    String stringValue = properties.getProperty(SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION);
+    long value = SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_DEFAULT;
+    if (stringValue != null) {
+      try {
+        value = Long.valueOf(stringValue);
+      }
+      catch (NumberFormatException ignored) {
+      }
+
+    }
+
+    return value;
+  }
+
+
+
+  /**
    * @return whether staleConfig's flag is cached.
    */
   public boolean isStaleConfigCacheEnabled() {
@@ -2501,6 +2581,15 @@ public class Configuration {
   }
 
   /**
+   * Get the Ambari display URL
+   * @return the configured Ambari display URL, or null if it is not set
+   */
+  public String getAmbariDisplayUrl() {
+    return properties.getProperty(AMBARI_DISPLAY_URL, null);
+  }
+
+
+  /**
    * @return number of retry attempts for api and blueprint operations
    */
   public int getOperationsRetryAttempts() {

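Each of the three new cache getters above follows the same pattern: read the raw string from the server's configuration properties and fall back to the compiled-in default when the property is missing or unparsable. A minimal, self-contained sketch of that pattern for the cache-size property (the java.util.Properties instance stands in for the server's loaded configuration file):

    import java.util.Properties;

    public class CacheConfigSketch {
        // Mirrors SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE_DEFAULT above.
        static final long DEFAULT_CACHE_SIZE = 10000L;

        static long getCacheSize(Properties props) {
            String raw = props.getProperty("server.hrcStatusSummary.cache.size");
            if (raw == null) {
                return DEFAULT_CACHE_SIZE;      // property not set: use the default
            }
            try {
                return Long.valueOf(raw);       // property set: parse it
            } catch (NumberFormatException e) {
                return DEFAULT_CACHE_SIZE;      // unparsable value: fall back
            }
        }

        public static void main(String[] args) {
            Properties props = new Properties();
            props.setProperty("server.hrcStatusSummary.cache.size", "5000");
            System.out.println(getCacheSize(props));   // prints 5000
        }
    }
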
http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
index 76ff6db..daca64d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
@@ -65,6 +65,7 @@ import org.apache.ambari.server.notifications.NotificationDispatcher;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.DBAccessorImpl;
 import org.apache.ambari.server.orm.PersistenceType;
+import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.scheduler.ExecutionScheduler;
 import org.apache.ambari.server.scheduler.ExecutionSchedulerImpl;
 import org.apache.ambari.server.security.AmbariEntryPoint;
@@ -338,6 +339,21 @@ public class ControllerModule extends AbstractModule {
     bindConstant().annotatedWith(Names.named("executionCommandCacheSize")).
         to(configuration.getExecutionCommandsCacheSize());
 
+
+    // Host role commands status summary max cache enable/disable
+    bindConstant().annotatedWith(Names.named(HostRoleCommandDAO.HRC_STATUS_SUMMARY_CACHE_ENABLED)).
+      to(configuration.getHostRoleCommandStatusSummaryCacheEnabled());
+
+    // Host role commands status summary max cache size
+    bindConstant().annotatedWith(Names.named(HostRoleCommandDAO.HRC_STATUS_SUMMARY_CACHE_SIZE)).
+      to(configuration.getHostRoleCommandStatusSummaryCacheSize());
+    // Host role command status summary cache expiry duration in minutes
+    bindConstant().annotatedWith(Names.named(HostRoleCommandDAO.HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_MINUTES)).
+      to(configuration.getHostRoleCommandStatusSummaryCacheExpiryDuration());
+
+
+
+
     bind(AmbariManagementController.class).to(
       AmbariManagementControllerImpl.class);
     bind(AbstractRootServiceResponseFactory.class).to(RootServiceResponseFactory.class);

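The bindConstant() calls above expose the cache settings as named Guice constants so the HostRoleCommandDAO constructor further down can receive them through @Named parameters. A reduced sketch of that wiring for a single setting (the constant name here is illustrative; the real code uses the HRC_STATUS_SUMMARY_CACHE_* keys defined in HostRoleCommandDAO):

    import com.google.inject.AbstractModule;
    import com.google.inject.Guice;
    import com.google.inject.Inject;
    import com.google.inject.name.Named;
    import com.google.inject.name.Names;

    public class NamedConstantSketch {
        static class CacheAwareDao {
            private final long cacheSize;

            @Inject
            CacheAwareDao(@Named("hrcStatusSummaryCacheSize") long cacheSize) {
                this.cacheSize = cacheSize;   // injected from the module's bindConstant()
            }
        }

        static class CacheModule extends AbstractModule {
            @Override
            protected void configure() {
                // Same mechanism as the ControllerModule change above.
                bindConstant().annotatedWith(Names.named("hrcStatusSummaryCacheSize")).to(10000L);
            }
        }

        public static void main(String[] args) {
            CacheAwareDao dao = Guice.createInjector(new CacheModule()).getInstance(CacheAwareDao.class);
            System.out.println(dao.cacheSize);   // prints 10000
        }
    }
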
http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentRequest.java
index 78b9897..ba0b84f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentRequest.java
@@ -31,21 +31,28 @@ public class ServiceComponentRequest {
 
   private String componentCategory;
 
+  private String recoveryEnabled; // CREATE/UPDATE
+
   public ServiceComponentRequest(String clusterName, String serviceName,
                                  String componentName, String desiredState) {
-    this.clusterName = clusterName;
-    this.serviceName = serviceName;
-    this.componentName = componentName;
-    this.desiredState = desiredState;
+    this(clusterName, serviceName, componentName, desiredState, null, null);
+  }
+
+  public ServiceComponentRequest(String clusterName, String serviceName,
+                                 String componentName, String desiredState,
+                                 String recoveryEnabled) {
+    this(clusterName, serviceName, componentName, desiredState, recoveryEnabled, null);
   }
 
   public ServiceComponentRequest(String clusterName,
                                  String serviceName, String componentName,
-                                 String desiredState, String componentCategory) {
+                                 String desiredState, String recoveryEnabled,
+                                 String componentCategory) {
     this.clusterName = clusterName;
     this.serviceName = serviceName;
     this.componentName = componentName;
     this.desiredState = desiredState;
+    this.recoveryEnabled = recoveryEnabled;
     this.componentCategory = componentCategory;
   }
 
@@ -105,6 +112,20 @@ public class ServiceComponentRequest {
     this.clusterName = clusterName;
   }
 
+  /**
+   * @return recoveryEnabled
+   */
+  public String getRecoveryEnabled() {
+    return recoveryEnabled;
+  }
+
+  /**
+   * @param recoveryEnabled the recoveryEnabled value to set.
+   */
+  public void setRecoveryEnabled(String recoveryEnabled) {
+    this.recoveryEnabled = recoveryEnabled;
+  }
+
   public String getComponentCategory() {
     return componentCategory;
   }
@@ -115,7 +136,7 @@ public class ServiceComponentRequest {
 
   @Override
   public String toString() {
-    return String.format("[clusterName=%s, serviceName=%s, componentName=%s, desiredState=%s, componentCategory=%s]",
-        clusterName, serviceName, clusterName, desiredState, componentCategory);
+    return String.format("[clusterName=%s, serviceName=%s, componentName=%s, desiredState=%s, recoveryEnabled=%s, componentCategory=%s]",
+        clusterName, serviceName, clusterName, desiredState, recoveryEnabled, componentCategory);
   }
 }

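The recovery flag travels through ServiceComponentRequest as a String: "true" or "false" when the caller wants to change the component's auto start setting, null or empty when it should be left alone, which is why ComponentResourceProvider below only parses it when non-empty. A short, hypothetical caller-side sketch (cluster, service and component names are placeholders; the ambari-server classes are assumed to be on the classpath):

    import org.apache.ambari.server.controller.ServiceComponentRequest;

    public class RecoveryFlagRequestSketch {
        public static void main(String[] args) {
            // New five-argument constructor: the fifth argument carries the recovery flag.
            ServiceComponentRequest enableRecovery =
                new ServiceComponentRequest("c1", "HDFS", "DATANODE", null, "true");

            // Existing four-argument constructor: recoveryEnabled stays null, i.e. "do not change it".
            ServiceComponentRequest stateOnly =
                new ServiceComponentRequest("c1", "HDFS", "DATANODE", "STARTED");

            System.out.println(enableRecovery);
            System.out.println(stateOnly);
        }
    }
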
http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentResponse.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentResponse.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentResponse.java
index f7dd301..381b114 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentResponse.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentResponse.java
@@ -41,6 +41,8 @@ public class ServiceComponentResponse {
 
   private int installedCount;
 
+  private boolean recoveryEnabled;
+
   public ServiceComponentResponse(Long clusterId, String clusterName,
                                   String serviceName,
                                   String componentName,
@@ -48,7 +50,8 @@ public class ServiceComponentResponse {
                                   String desiredState,
                                   int totalCount,
                                   int startedCount,
-                                  int installedCount) {
+                                  int installedCount,
+                                  boolean recoveryEnabled) {
     super();
     this.clusterId = clusterId;
     this.clusterName = clusterName;
@@ -59,6 +62,7 @@ public class ServiceComponentResponse {
     this.totalCount = totalCount;
     this.startedCount = startedCount;
     this.installedCount = installedCount;
+    this.recoveryEnabled = recoveryEnabled;
   }
 
   /**
@@ -211,6 +215,22 @@ public class ServiceComponentResponse {
     this.totalCount = totalCount;
   }
 
+  /**
+   * Indicates whether auto start (recovery) is enabled for the service component.
+   * @return true if recovery is enabled, false otherwise
+   */
+  public boolean isRecoveryEnabled() {
+    return recoveryEnabled;
+  }
+
+  /**
+   * Enables or disables auto start (recovery) for the service component.
+   * @param recoveryEnabled true to enable recovery, false to disable it
+   */
+  public void setRecoveryEnabled(boolean recoveryEnabled) {
+    this.recoveryEnabled = recoveryEnabled;
+  }
+
   @Override
   public boolean equals(Object o) {
     if (this == o) return true;

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
index 3ad6e64..b339adf 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
@@ -84,6 +84,7 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
   protected static final String COMPONENT_TOTAL_COUNT_PROPERTY_ID     = "ServiceComponentInfo/total_count";
   protected static final String COMPONENT_STARTED_COUNT_PROPERTY_ID   = "ServiceComponentInfo/started_count";
   protected static final String COMPONENT_INSTALLED_COUNT_PROPERTY_ID = "ServiceComponentInfo/installed_count";
+  protected static final String COMPONENT_RECOVERY_ENABLED_ID         = "ServiceComponentInfo/recovery_enabled";
 
   private static final String TRUE = "true";
 
@@ -178,6 +179,7 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
       setResourceProperty(resource, COMPONENT_TOTAL_COUNT_PROPERTY_ID, response.getTotalCount(), requestedIds);
       setResourceProperty(resource, COMPONENT_STARTED_COUNT_PROPERTY_ID, response.getStartedCount(), requestedIds);
       setResourceProperty(resource, COMPONENT_INSTALLED_COUNT_PROPERTY_ID, response.getInstalledCount(), requestedIds);
+      setResourceProperty(resource, COMPONENT_RECOVERY_ENABLED_ID, String.valueOf(response.isRecoveryEnabled()), requestedIds);
 
       resources.add(resource);
     }
@@ -251,6 +253,7 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
         (String) properties.get(COMPONENT_SERVICE_NAME_PROPERTY_ID),
         (String) properties.get(COMPONENT_COMPONENT_NAME_PROPERTY_ID),
         (String) properties.get(COMPONENT_STATE_PROPERTY_ID),
+        (String) properties.get(COMPONENT_RECOVERY_ENABLED_ID),
         (String) properties.get(COMPONENT_CATEGORY_PROPERTY_ID));
   }
 
@@ -463,6 +466,9 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
     Map<String, Map<String, Set<String>>> componentNames = new HashMap<>();
     Set<State> seenNewStates = new HashSet<>();
 
+    Collection<ServiceComponent> recoveryEnabledComponents = new ArrayList<>();
+    Collection<ServiceComponent> recoveryDisabledComponents = new ArrayList<>();
+
     // Determine operation level
     Resource.Type reqOpLvl;
     if (requestProperties.containsKey(RequestOperationLevel.OPERATION_LEVEL_ID)) {
@@ -513,6 +519,20 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
         continue;
       }
 
+      // Gather the components affected by the change in
+      // auto start state
+      if (!StringUtils.isEmpty(request.getRecoveryEnabled())) {
+        boolean newRecoveryEnabled = Boolean.parseBoolean(request.getRecoveryEnabled());
+        boolean oldRecoveryEnabled = sc.isRecoveryEnabled();
+        if (newRecoveryEnabled != oldRecoveryEnabled) {
+          if (newRecoveryEnabled) {
+            recoveryEnabledComponents.add(sc);
+          } else {
+            recoveryDisabledComponents.add(sc);
+          }
+        }
+      }
+
       if (newState == null) {
         debug("Nothing to do for new updateServiceComponent request, request ={}, newDesiredState=null" + request);
         continue;
@@ -539,9 +559,11 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
               + ", clusterId=" + cluster.getClusterId()
               + ", serviceName=" + sc.getServiceName()
               + ", componentName=" + sc.getName()
+              + ", recoveryEnabled=" + sc.isRecoveryEnabled()
               + ", currentDesiredState=" + oldScState
               + ", newDesiredState=" + newState);
         }
+
         if (!changedComps.containsKey(newState)) {
           changedComps.put(newState, new ArrayList<ServiceComponent>());
         }
@@ -549,6 +571,7 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
               + ", clusterName=" + clusterName
               + ", serviceName=" + serviceName
               + ", componentName=" + sc.getName()
+              + ", recoveryEnabled=" + sc.isRecoveryEnabled()
               + ", currentDesiredState=" + oldScState
               + ", newDesiredState=" + newState);
 
@@ -562,6 +585,7 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
                 + ", clusterName=" + clusterName
                 + ", serviceName=" + serviceName
                 + ", componentName=" + sc.getName()
+                + ", recoveryEnabled=" + sc.isRecoveryEnabled()
                 + ", hostname=" + sch.getHostName()
                 + ", currentState=" + oldSchState
                 + ", newDesiredState=" + newState);
@@ -574,6 +598,7 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
                 + ", clusterName=" + clusterName
                 + ", serviceName=" + serviceName
                 + ", componentName=" + sc.getName()
+                + ", recoveryEnabled=" + sc.isRecoveryEnabled()
                 + ", hostname=" + sch.getHostName()
                 + ", currentState=" + oldSchState
                 + ", newDesiredState=" + newState);
@@ -587,6 +612,7 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
                 + ", clusterName=" + clusterName
                 + ", serviceName=" + serviceName
                 + ", componentName=" + sc.getName()
+                + ", recoveryEnabled=" + sc.isRecoveryEnabled()
                 + ", hostname=" + sch.getHostName());
 
           continue;
@@ -600,6 +626,7 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
               + ", clusterId=" + cluster.getClusterId()
               + ", serviceName=" + sch.getServiceName()
               + ", componentName=" + sch.getServiceComponentName()
+              + ", recoveryEnabled=" + sc.isRecoveryEnabled()
               + ", hostname=" + sch.getHostName()
               + ", currentState=" + oldSchState
               + ", newDesiredState=" + newState);
@@ -615,6 +642,7 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
               + ", clusterName=" + clusterName
               + ", serviceName=" + serviceName
               + ", componentName=" + sc.getName()
+              + ", recoveryEnabled=" + sc.isRecoveryEnabled()
               + ", hostname=" + sch.getHostName()
               + ", currentState=" + oldSchState
               + ", newDesiredState=" + newState);
@@ -628,6 +656,16 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
 
     // TODO additional validation?
 
+    // Validations completed. Update the affected service components now.
+
+    for (ServiceComponent sc : recoveryEnabledComponents) {
+      sc.setRecoveryEnabled(true);
+    }
+
+    for (ServiceComponent sc : recoveryDisabledComponents) {
+      sc.setRecoveryEnabled(false);
+    }
+
     Cluster cluster = clusters.getCluster(clusterNames.iterator().next());
 
     return getManagementController().createAndPersistStages(cluster, requestProperties, null, null, changedComps, changedScHosts,

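The update path above only touches components whose requested auto start state actually differs from the stored one: the request's string flag is parsed, compared with ServiceComponent.isRecoveryEnabled(), and the component is queued into either an "enable" or a "disable" bucket that is applied once validation has finished. A minimal sketch of that decision, with the Ambari types replaced by plain stand-ins:

    import java.util.ArrayList;
    import java.util.List;

    public class RecoveryBucketingSketch {
        // Stand-in for ServiceComponent with only the state this decision needs.
        static class Component {
            final String name;
            boolean recoveryEnabled;
            Component(String name, boolean recoveryEnabled) {
                this.name = name;
                this.recoveryEnabled = recoveryEnabled;
            }
        }

        public static void main(String[] args) {
            List<Component> enable = new ArrayList<>();
            List<Component> disable = new ArrayList<>();

            Component datanode = new Component("DATANODE", false);
            String requested = "true";   // request.getRecoveryEnabled(): may be null or empty

            if (requested != null && !requested.isEmpty()) {
                boolean newValue = Boolean.parseBoolean(requested);
                if (newValue != datanode.recoveryEnabled) {    // queue only real changes
                    (newValue ? enable : disable).add(datanode);
                }
            }

            // Applied only after all validation passes, as in the hunk above.
            for (Component c : enable)  { c.recoveryEnabled = true;  }
            for (Component c : disable) { c.recoveryEnabled = false; }

            System.out.println(datanode.name + " recoveryEnabled=" + datanode.recoveryEnabled);
        }
    }
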
http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java
index 4fd03e5..deca9b1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java
@@ -18,9 +18,6 @@
 
 package org.apache.ambari.server.orm.dao;
 
-import static org.apache.ambari.server.orm.DBAccessor.DbType.ORACLE;
-import static org.apache.ambari.server.orm.dao.DaoUtils.ORACLE_LIST_LIMIT;
-
 import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -28,6 +25,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.TimeUnit;
 
 import javax.persistence.EntityManager;
 import javax.persistence.TypedQuery;
@@ -49,16 +47,27 @@ import org.apache.ambari.server.orm.entities.HostEntity;
 import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
 import org.apache.ambari.server.orm.entities.HostRoleCommandEntity_;
 import org.apache.ambari.server.orm.entities.StageEntity;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.CacheLoader;
+import com.google.common.cache.LoadingCache;
 import com.google.common.collect.Lists;
 import com.google.inject.Inject;
 import com.google.inject.Provider;
 import com.google.inject.Singleton;
+import com.google.inject.name.Named;
 import com.google.inject.persist.Transactional;
 
+import static org.apache.ambari.server.orm.DBAccessor.DbType.ORACLE;
+import static org.apache.ambari.server.orm.dao.DaoUtils.ORACLE_LIST_LIMIT;
+
 @Singleton
 public class HostRoleCommandDAO {
 
+  private static final Logger LOG = LoggerFactory.getLogger(HostRoleCommandDAO.class);
+
   private static final String SUMMARY_DTO = String.format(
     "SELECT NEW %s(" +
       "MAX(hrc.stage.skippable), " +
@@ -92,12 +101,122 @@ public class HostRoleCommandDAO {
    */
   private static final String COMPLETED_REQUESTS_SQL = "SELECT DISTINCT task.requestId FROM HostRoleCommandEntity task WHERE task.requestId NOT IN (SELECT task.requestId FROM HostRoleCommandEntity task WHERE task.status IN :notCompletedStatuses) ORDER BY task.requestId {0}";
 
+  /**
+   * A cache that holds {@link HostRoleCommandStatusSummaryDTO} objects grouped by stage id, keyed by request id.
+   * The JPQL that computes the host role command status summary for a request is rather expensive,
+   * thus this cache helps reduce the load on the database.
+   */
+  private final LoadingCache<Long, Map<Long, HostRoleCommandStatusSummaryDTO>> hrcStatusSummaryCache;
+
+  /**
+   * Specifies whether caching of {@link HostRoleCommandStatusSummaryDTO} objects grouped by stage id
+   * per request is enabled.
+   */
+  private final boolean hostRoleCommandStatusSummaryCacheEnabled;
+
+
   @Inject
   Provider<EntityManager> entityManagerProvider;
 
   @Inject
   DaoUtils daoUtils;
 
+  public final static String HRC_STATUS_SUMMARY_CACHE_SIZE =  "hostRoleCommandStatusSummaryCacheSize";
+  public final static String HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_MINUTES = "hostRoleCommandStatusCacheExpiryDurationMins";
+  public final static String HRC_STATUS_SUMMARY_CACHE_ENABLED =  "hostRoleCommandStatusSummaryCacheEnabled";
+
+  /**
+   * Invalidates the host role command status summary cache entry that corresponds to the given request.
+   * @param requestId the key of the cache entry to be invalidated.
+   */
+  protected void invalidateHostRoleCommandStatusSummaryCache(Long requestId) {
+    if (!hostRoleCommandStatusSummaryCacheEnabled)
+      return;
+
+    LOG.debug("Invalidating host role command status summary cache for request {} !", requestId);
+    hrcStatusSummaryCache.invalidate(requestId);
+
+  }
+
+  /**
+   * Invalidates the entries in the host role command status summary cache that depend on the passed {@link org.apache.ambari.server.orm.entities.HostRoleCommandEntity}
+   * entity.
+   * @param hostRoleCommandEntity the entity whose request's cache entry should be invalidated
+   */
+  protected void invalidateHostRoleCommandStatusCache(HostRoleCommandEntity hostRoleCommandEntity) {
+    if (!hostRoleCommandStatusSummaryCacheEnabled)
+      return;
+
+    if (hostRoleCommandEntity != null) {
+      Long requestId = hostRoleCommandEntity.getRequestId();
+      if (requestId == null) {
+        StageEntity stageEntity = hostRoleCommandEntity.getStage();
+        if (stageEntity != null)
+          requestId = stageEntity.getRequestId();
+      }
+
+      if (requestId != null)
+        invalidateHostRoleCommandStatusSummaryCache(requestId.longValue());
+    }
+
+  }
+
+  /**
+   * Loads the counts of tasks for a request and groups them by stage id.
+   * This allows for very efficient loading when there are a huge number of stages
+   * and tasks to iterate (for example, during a Stack Upgrade).
+   * @param requestId the request id
+   * @return the map of stage-to-summary objects
+   */
+  @RequiresSession
+  protected Map<Long, HostRoleCommandStatusSummaryDTO> loadAggregateCounts(Long requestId) {
+
+    TypedQuery<HostRoleCommandStatusSummaryDTO> query = entityManagerProvider.get().createQuery(
+      SUMMARY_DTO, HostRoleCommandStatusSummaryDTO.class);
+
+    query.setParameter("requestId", requestId);
+    query.setParameter("aborted", HostRoleStatus.ABORTED);
+    query.setParameter("completed", HostRoleStatus.COMPLETED);
+    query.setParameter("failed", HostRoleStatus.FAILED);
+    query.setParameter("holding", HostRoleStatus.HOLDING);
+    query.setParameter("holding_failed", HostRoleStatus.HOLDING_FAILED);
+    query.setParameter("holding_timedout", HostRoleStatus.HOLDING_TIMEDOUT);
+    query.setParameter("in_progress", HostRoleStatus.IN_PROGRESS);
+    query.setParameter("pending", HostRoleStatus.PENDING);
+    query.setParameter("queued", HostRoleStatus.QUEUED);
+    query.setParameter("timedout", HostRoleStatus.TIMEDOUT);
+    query.setParameter("skipped_failed", HostRoleStatus.SKIPPED_FAILED);
+
+    Map<Long, HostRoleCommandStatusSummaryDTO> map = new HashMap<Long, HostRoleCommandStatusSummaryDTO>();
+
+    for (HostRoleCommandStatusSummaryDTO dto : daoUtils.selectList(query)) {
+      map.put(dto.getStageId(), dto);
+    }
+
+    return map;
+  }
+
+  @Inject
+  public HostRoleCommandDAO(@Named(HRC_STATUS_SUMMARY_CACHE_ENABLED) boolean hostRoleCommandStatusSummaryCacheEnabled, @Named(HRC_STATUS_SUMMARY_CACHE_SIZE) long hostRoleCommandStatusSummaryCacheLimit, @Named(HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_MINUTES) long hostRoleCommandStatusSummaryCacheExpiryDurationMins) {
+    this.hostRoleCommandStatusSummaryCacheEnabled = hostRoleCommandStatusSummaryCacheEnabled;
+
+    LOG.info("Host role command status summary cache {} !", hostRoleCommandStatusSummaryCacheEnabled ? "enabled" : "disabled");
+
+
+    hrcStatusSummaryCache = CacheBuilder.newBuilder()
+      .maximumSize(hostRoleCommandStatusSummaryCacheLimit)
+      .expireAfterAccess(hostRoleCommandStatusSummaryCacheExpiryDurationMins, TimeUnit.MINUTES)
+      .build(new CacheLoader<Long, Map<Long, HostRoleCommandStatusSummaryDTO>>() {
+        @Override
+        public Map<Long, HostRoleCommandStatusSummaryDTO> load(Long requestId) throws Exception {
+          LOG.debug("Cache miss for host role command status summary object for request {}, fetching from JPA", requestId);
+          Map<Long, HostRoleCommandStatusSummaryDTO> hrcCommandStatusByStageId = loadAggregateCounts(requestId);
+
+          return hrcCommandStatusByStageId;
+        }
+      });
+  }
+
   @RequiresSession
   public HostRoleCommandEntity findByPK(long taskId) {
     return entityManagerProvider.get().find(HostRoleCommandEntity.class, taskId);
@@ -425,11 +544,16 @@ public class HostRoleCommandDAO {
   @Transactional
   public void create(HostRoleCommandEntity stageEntity) {
     entityManagerProvider.get().persist(stageEntity);
+
+    invalidateHostRoleCommandStatusCache(stageEntity);
   }
 
   @Transactional
   public HostRoleCommandEntity merge(HostRoleCommandEntity stageEntity) {
     HostRoleCommandEntity entity = entityManagerProvider.get().merge(stageEntity);
+
+    invalidateHostRoleCommandStatusCache(entity);
+
     return entity;
   }
 
@@ -446,6 +570,8 @@ public class HostRoleCommandDAO {
     List<HostRoleCommandEntity> managedList = new ArrayList<HostRoleCommandEntity>(entities.size());
     for (HostRoleCommandEntity entity : entities) {
       managedList.add(entityManagerProvider.get().merge(entity));
+
+      invalidateHostRoleCommandStatusCache(entity);
     }
     return managedList;
   }
@@ -453,6 +579,8 @@ public class HostRoleCommandDAO {
   @Transactional
   public void remove(HostRoleCommandEntity stageEntity) {
     entityManagerProvider.get().remove(merge(stageEntity));
+
+    invalidateHostRoleCommandStatusCache(stageEntity);
   }
 
   @Transactional
@@ -463,39 +591,17 @@ public class HostRoleCommandDAO {
 
   /**
    * Finds the counts of tasks for a request and groups them by stage id.
-   * This allows for very efficient loading when there are a huge number of stages
-   * and tasks to iterate (for example, during a Stack Upgrade).
    * @param requestId the request id
    * @return the map of stage-to-summary objects
    */
-  @RequiresSession
   public Map<Long, HostRoleCommandStatusSummaryDTO> findAggregateCounts(Long requestId) {
-
-    TypedQuery<HostRoleCommandStatusSummaryDTO> query = entityManagerProvider.get().createQuery(
-        SUMMARY_DTO, HostRoleCommandStatusSummaryDTO.class);
-
-    query.setParameter("requestId", requestId);
-    query.setParameter("aborted", HostRoleStatus.ABORTED);
-    query.setParameter("completed", HostRoleStatus.COMPLETED);
-    query.setParameter("failed", HostRoleStatus.FAILED);
-    query.setParameter("holding", HostRoleStatus.HOLDING);
-    query.setParameter("holding_failed", HostRoleStatus.HOLDING_FAILED);
-    query.setParameter("holding_timedout", HostRoleStatus.HOLDING_TIMEDOUT);
-    query.setParameter("in_progress", HostRoleStatus.IN_PROGRESS);
-    query.setParameter("pending", HostRoleStatus.PENDING);
-    query.setParameter("queued", HostRoleStatus.QUEUED);
-    query.setParameter("timedout", HostRoleStatus.TIMEDOUT);
-    query.setParameter("skipped_failed", HostRoleStatus.SKIPPED_FAILED);
-
-    Map<Long, HostRoleCommandStatusSummaryDTO> map = new HashMap<Long, HostRoleCommandStatusSummaryDTO>();
-
-    for (HostRoleCommandStatusSummaryDTO dto : daoUtils.selectList(query)) {
-      map.put(dto.getStageId(), dto);
-    }
-
-    return map;
+    if (hostRoleCommandStatusSummaryCacheEnabled)
+      return hrcStatusSummaryCache.getUnchecked(requestId);
+    else
+      return loadAggregateCounts(requestId); // if caching not enabled fall back to fetching through JPA
   }
 
+
   /**
    * Updates the {@link HostRoleCommandEntity#isFailureAutoSkipped()} flag for
    * all commands for the given request.

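The new summary cache is a Guava LoadingCache: findAggregateCounts() now calls getUnchecked(), which runs loadAggregateCounts() only on a miss, the write paths (create/merge/remove) invalidate the affected request's entry, and maximumSize/expireAfterAccess bound how much the cache holds. A self-contained sketch of the same construction, with a toy loader standing in for the expensive JPQL query:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.TimeUnit;

    import com.google.common.cache.CacheBuilder;
    import com.google.common.cache.CacheLoader;
    import com.google.common.cache.LoadingCache;

    public class SummaryCacheSketch {
        public static void main(String[] args) {
            LoadingCache<Long, Map<Long, String>> cache = CacheBuilder.newBuilder()
                .maximumSize(10000L)                        // cap the number of cached requests
                .expireAfterAccess(30, TimeUnit.MINUTES)    // drop entries not read for a while
                .build(new CacheLoader<Long, Map<Long, String>>() {
                    @Override
                    public Map<Long, String> load(Long requestId) {
                        // Stand-in for loadAggregateCounts(requestId), i.e. the JPQL query.
                        Map<Long, String> byStage = new HashMap<>();
                        byStage.put(0L, "summary of request " + requestId);
                        return byStage;
                    }
                });

            System.out.println(cache.getUnchecked(1L));   // miss: runs the loader
            System.out.println(cache.getUnchecked(1L));   // hit: served from the cache
            cache.invalidate(1L);                         // what create/merge/remove do above
            System.out.println(cache.getUnchecked(1L));   // reloaded after invalidation
        }
    }
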
http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
index af71c40..1674175 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
@@ -32,6 +32,7 @@ import javax.persistence.FetchType;
 import javax.persistence.GeneratedValue;
 import javax.persistence.GenerationType;
 import javax.persistence.Id;
+import javax.persistence.Index;
 import javax.persistence.JoinColumn;
 import javax.persistence.JoinColumns;
 import javax.persistence.Lob;
@@ -48,7 +49,11 @@ import org.apache.ambari.server.actionmanager.HostRoleStatus;
 import org.apache.commons.lang.ArrayUtils;
 
 @Entity
-@Table(name = "host_role_command")
+@Table(name = "host_role_command"
+       , indexes = {
+           @Index(name = "idx_hrc_request_id", columnList = "request_id")
+         , @Index(name = "idx_hrc_status_role", columnList = "status, role")
+       })
 @TableGenerator(name = "host_role_command_id_generator",
     table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
     , pkColumnValue = "host_role_command_id_seq"

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
index 65cc107..519e4e6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
@@ -81,6 +81,9 @@ public class ServiceComponentDesiredStateEntity {
   @Enumerated(EnumType.STRING)
   private State desiredState = State.INIT;
 
+  @Column(name = "recovery_enabled", nullable = false, insertable = true, updatable = true)
+  private Integer recoveryEnabled = 0;
+
   /**
    * Unidirectional one-to-one association to {@link StackEntity}
    */
@@ -180,6 +183,14 @@ public class ServiceComponentDesiredStateEntity {
     return serviceComponentHistory;
   }
 
+  public boolean isRecoveryEnabled() {
+    return recoveryEnabled != 0;
+  }
+
+  public void setRecoveryEnabled(boolean recoveryEnabled) {
+    this.recoveryEnabled = recoveryEnabled ? 1 : 0;
+  }
+
   @Override
   public boolean equals(Object o) {
     if (this == o) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/java/org/apache/ambari/server/serveraction/ServerActionExecutor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/ServerActionExecutor.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/ServerActionExecutor.java
index 20cf5bb..f93cf43 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/ServerActionExecutor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/ServerActionExecutor.java
@@ -392,17 +392,8 @@ public class ServerActionExecutor {
    * @throws InterruptedException
    */
   public void doWork() throws InterruptedException {
-    List<HostRoleCommand> tasks = db.getTasksByHostRoleAndStatus(serverHostName,
-        Role.AMBARI_SERVER_ACTION.toString(), HostRoleStatus.QUEUED);
-
-    if (null == tasks || tasks.isEmpty()) {
-      // !!! if the server is not a part of the cluster,
-      // !!! just look for anything designated AMBARI_SERVER_ACTION.
-      // !!! do we even need to worry about servername in the first place?  We're
-      // !!! _on_ the ambari server!
-      tasks = db.getTasksByRoleAndStatus(Role.AMBARI_SERVER_ACTION.name(),
-          HostRoleStatus.QUEUED);
-    }
+    List<HostRoleCommand> tasks = db.getTasksByRoleAndStatus(Role.AMBARI_SERVER_ACTION.name(),
+      HostRoleStatus.QUEUED);
 
     if ((tasks != null) && !tasks.isEmpty()) {
       for (HostRoleCommand task : tasks) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponent.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponent.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponent.java
index 7803045..dcb7cf6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponent.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponent.java
@@ -28,6 +28,20 @@ public interface ServiceComponent {
 
   String getName();
 
+  /**
+   * Indicates whether auto start (recovery)
+   * is enabled for this component.
+   * @return true if recovery is enabled, false otherwise
+   */
+  boolean isRecoveryEnabled();
+
+  /**
+   * Enables or disables auto start (recovery)
+   * for this component.
+   * @param recoveryEnabled - true to enable recovery, false to disable it
+   */
+  void setRecoveryEnabled(boolean recoveryEnabled);
+
   String getServiceName();
 
   long getClusterId();

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
index 4afc857..defe808 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
@@ -95,6 +95,7 @@ public class ServiceComponentImpl implements ServiceComponent {
     desiredStateEntity.setDesiredState(State.INIT);
     desiredStateEntity.setServiceName(service.getName());
     desiredStateEntity.setClusterId(service.getClusterId());
+    desiredStateEntity.setRecoveryEnabled(false);
 
     setDesiredStackVersion(service.getDesiredStackVersion());
 
@@ -181,6 +182,55 @@ public class ServiceComponentImpl implements ServiceComponent {
     return componentName;
   }
 
+  /**
+   * Get the recoveryEnabled value.
+   *
+   * @return true or false
+   */
+  @Override
+  public boolean isRecoveryEnabled() {
+    ServiceComponentDesiredStateEntity desiredStateEntity = getDesiredStateEntity();
+    if (desiredStateEntity != null) {
+      return desiredStateEntity.isRecoveryEnabled();
+    } else {
+      LOG.warn("Trying to fetch a member from an entity object that may " +
+              "have been previously deleted, serviceName = " + service.getName() + ", " +
+              "componentName = " + componentName);
+    }
+    return false;
+  }
+
+  /**
+   * Set the recoveryEnabled field in the entity object.
+   *
+   * @param recoveryEnabled - true or false
+   */
+  @Override
+  public void setRecoveryEnabled(boolean recoveryEnabled) {
+    readWriteLock.writeLock().lock();
+    try {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Setting RecoveryEnabled of Component" + ", clusterName="
+                + service.getCluster().getClusterName() + ", clusterId="
+                + service.getCluster().getClusterId() + ", serviceName="
+                + service.getName() + ", componentName=" + getName()
+                + ", oldRecoveryEnabled=" + isRecoveryEnabled() + ", newRecoveryEnabled="
+                + recoveryEnabled);
+      }
+      ServiceComponentDesiredStateEntity desiredStateEntity = getDesiredStateEntity();
+      if (desiredStateEntity != null) {
+        desiredStateEntity.setRecoveryEnabled(recoveryEnabled);
+        saveIfPersisted(desiredStateEntity);
+      } else {
+        LOG.warn("Setting a member on an entity object that may have been " +
+                "previously deleted, serviceName = " + service.getName());
+      }
+
+    } finally {
+      readWriteLock.writeLock().unlock();
+    }
+  }
+
   @Override
   public String getServiceName() {
     return service.getName();
@@ -247,7 +297,8 @@ public class ServiceComponentImpl implements ServiceComponent {
               + ", clusterId=" + service.getCluster().getClusterId()
               + ", serviceName=" + service.getName()
               + ", serviceComponentName=" + getName()
-              + ", hostname=" + hostComponent.getHostName());
+              + ", hostname=" + hostComponent.getHostName()
+              + ", recoveryEnabled=" + isRecoveryEnabled());
         }
         if (hostComponents.containsKey(hostComponent.getHostName())) {
           throw new AmbariException("Cannot add duplicate ServiceComponentHost"
@@ -255,7 +306,8 @@ public class ServiceComponentImpl implements ServiceComponent {
               + ", clusterId=" + service.getCluster().getClusterId()
               + ", serviceName=" + service.getName()
               + ", serviceComponentName=" + getName()
-              + ", hostname=" + hostComponent.getHostName());
+              + ", hostname=" + hostComponent.getHostName()
+              + ", recoveryEnabled=" + isRecoveryEnabled());
         }
         // FIXME need a better approach of caching components by host
         ClusterImpl clusterImpl = (ClusterImpl) service.getCluster();
@@ -283,6 +335,7 @@ public class ServiceComponentImpl implements ServiceComponent {
               + ", clusterId=" + service.getCluster().getClusterId()
               + ", serviceName=" + service.getName()
               + ", serviceComponentName=" + getName()
+              + ", recoveryEnabled=" + isRecoveryEnabled()
               + ", hostname=" + hostName);
         }
         if (hostComponents.containsKey(hostName)) {
@@ -291,6 +344,7 @@ public class ServiceComponentImpl implements ServiceComponent {
               + ", clusterId=" + service.getCluster().getClusterId()
               + ", serviceName=" + service.getName()
               + ", serviceComponentName=" + getName()
+              + ", recoveryEnabled=" + isRecoveryEnabled()
               + ", hostname=" + hostName);
         }
         ServiceComponentHost hostComponent = serviceComponentHostFactory.createNew(this, hostName);
@@ -354,11 +408,11 @@ public class ServiceComponentImpl implements ServiceComponent {
     try {
       if (LOG.isDebugEnabled()) {
         LOG.debug("Setting DesiredState of Service" + ", clusterName="
-            + service.getCluster().getClusterName() + ", clusterId="
-            + service.getCluster().getClusterId() + ", serviceName="
-            + service.getName() + ", serviceComponentName=" + getName()
-            + ", oldDesiredState=" + getDesiredState() + ", newDesiredState="
-            + state);
+                + service.getCluster().getClusterName() + ", clusterId="
+                + service.getCluster().getClusterId() + ", serviceName="
+                + service.getName() + ", serviceComponentName=" + getName()
+                + ", oldDesiredState=" + getDesiredState() + ", newDesiredState="
+                + state);
       }
       ServiceComponentDesiredStateEntity desiredStateEntity = getDesiredStateEntity();
       if (desiredStateEntity != null) {
@@ -428,7 +482,8 @@ public class ServiceComponentImpl implements ServiceComponent {
       ServiceComponentResponse r = new ServiceComponentResponse(getClusterId(),
           cluster.getClusterName(), service.getName(), getName(),
           getDesiredStackVersion().getStackId(), getDesiredState().toString(),
-          getTotalCount(), getStartedCount(), getInstalledCount());
+          getTotalCount(), getStartedCount(), getInstalledCount(),
+          isRecoveryEnabled());
       return r;
     } finally {
       readWriteLock.readLock().unlock();
@@ -440,11 +495,13 @@ public class ServiceComponentImpl implements ServiceComponent {
     return service.getCluster().getClusterName();
   }
 
+
   @Override
   public void debugDump(StringBuilder sb) {
     readWriteLock.readLock().lock();
     try {
       sb.append("ServiceComponent={ serviceComponentName=" + getName()
+          + ", recoveryEnabled=" + isRecoveryEnabled()
           + ", clusterName=" + service.getCluster().getClusterName()
           + ", clusterId=" + service.getCluster().getClusterId()
           + ", serviceName=" + service.getName() + ", desiredStackVersion="
@@ -592,6 +649,7 @@ public class ServiceComponentImpl implements ServiceComponent {
                 + ", clusterName=" + getClusterName()
                 + ", serviceName=" + getServiceName()
                 + ", componentName=" + getName()
+                + ", recoveryEnabled=" + isRecoveryEnabled()
                 + ", hostname=" + sch.getHostName());
             return false;
           }
@@ -615,7 +673,8 @@ public class ServiceComponentImpl implements ServiceComponent {
         LOG.info("Deleting all servicecomponenthosts for component"
             + ", clusterName=" + getClusterName()
             + ", serviceName=" + getServiceName()
-            + ", componentName=" + getName());
+            + ", componentName=" + getName()
+            + ", recoveryEnabled=" + isRecoveryEnabled());
         for (ServiceComponentHost sch : hostComponents.values()) {
           if (!sch.canBeRemoved()) {
             throw new AmbariException("Found non removable hostcomponent "
@@ -624,6 +683,7 @@ public class ServiceComponentImpl implements ServiceComponent {
                 + ", clusterName=" + getClusterName()
                 + ", serviceName=" + getServiceName()
                 + ", componentName=" + getName()
+                + ", recoveryEnabled=" + isRecoveryEnabled()
                 + ", hostname=" + sch.getHostName());
           }
         }
@@ -652,12 +712,14 @@ public class ServiceComponentImpl implements ServiceComponent {
             + ", clusterName=" + getClusterName()
             + ", serviceName=" + getServiceName()
             + ", componentName=" + getName()
+            + ", recoveryEnabled=" + isRecoveryEnabled()
             + ", hostname=" + sch.getHostName());
         if (!sch.canBeRemoved()) {
           throw new AmbariException("Could not delete hostcomponent from cluster"
               + ", clusterName=" + getClusterName()
               + ", serviceName=" + getServiceName()
               + ", componentName=" + getName()
+              + ", recoveryEnabled=" + isRecoveryEnabled()
               + ", hostname=" + sch.getHostName());
         }
         sch.delete();

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 8230fe3..07addfc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -2474,13 +2474,31 @@ public class ClusterImpl implements Cluster {
     clusterGlobalLock.readLock().lock();
     try {
       List<ServiceConfigVersionResponse> serviceConfigVersionResponses = new ArrayList<ServiceConfigVersionResponse>();
-      Set<Long> activeIds = getActiveServiceConfigVersionIds();
 
-      for (ServiceConfigEntity serviceConfigEntity : serviceConfigDAO.getServiceConfigs(getClusterId())) {
+      List<ServiceConfigEntity> serviceConfigs = serviceConfigDAO.getServiceConfigs(getClusterId());
+      Map<String, ServiceConfigVersionResponse> activeServiceConfigResponses = new HashMap<>();
+
+      for (ServiceConfigEntity serviceConfigEntity : serviceConfigs) {
         ServiceConfigVersionResponse serviceConfigVersionResponse = convertToServiceConfigVersionResponse(serviceConfigEntity);
 
+        ServiceConfigVersionResponse activeServiceConfigResponse = activeServiceConfigResponses.get(serviceConfigVersionResponse.getServiceName());
+        if (activeServiceConfigResponse == null) {
+          activeServiceConfigResponse = serviceConfigVersionResponse;
+          activeServiceConfigResponses.put(serviceConfigVersionResponse.getServiceName(), serviceConfigVersionResponse);
+        }
+
         serviceConfigVersionResponse.setConfigurations(new ArrayList<ConfigurationResponse>());
-        serviceConfigVersionResponse.setIsCurrent(activeIds.contains(serviceConfigEntity.getServiceConfigId()));
+
+        if (serviceConfigEntity.getGroupId() == null) {
+          if (serviceConfigVersionResponse.getCreateTime() > activeServiceConfigResponse.getCreateTime())
+            activeServiceConfigResponses.put(serviceConfigVersionResponse.getServiceName(), serviceConfigVersionResponse);
+        }
+        else if (clusterConfigGroups != null && clusterConfigGroups.containsKey(serviceConfigEntity.getGroupId())){
+          if (serviceConfigVersionResponse.getVersion() > activeServiceConfigResponse.getVersion())
+            activeServiceConfigResponses.put(serviceConfigVersionResponse.getServiceName(), serviceConfigVersionResponse);
+        }
+
+        serviceConfigVersionResponse.setIsCurrent(false);
 
         List<ClusterConfigEntity> clusterConfigEntities = serviceConfigEntity.getClusterConfigEntities();
         for (ClusterConfigEntity clusterConfigEntity : clusterConfigEntities) {
@@ -2496,6 +2514,10 @@ public class ClusterImpl implements Cluster {
         serviceConfigVersionResponses.add(serviceConfigVersionResponse);
       }
 
+      for (ServiceConfigVersionResponse serviceConfigVersionResponse: activeServiceConfigResponses.values()) {
+        serviceConfigVersionResponse.setIsCurrent(true);
+      }
+
       return serviceConfigVersionResponses;
     } finally {
       clusterGlobalLock.readLock().unlock();
@@ -2514,14 +2536,6 @@ public class ClusterImpl implements Cluster {
     return responses;
   }
 
-  private Set<Long> getActiveServiceConfigVersionIds() {
-    Set<Long> idSet = new HashSet<Long>();
-    for (ServiceConfigEntity entity : getActiveServiceConfigVersionEntities()) {
-      idSet.add(entity.getServiceConfigId());
-    }
-    return idSet;
-  }
-
   private List<ServiceConfigEntity> getActiveServiceConfigVersionEntities() {
 
     List<ServiceConfigEntity> activeServiceConfigVersions = new ArrayList<ServiceConfigEntity>();

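The rewritten block above determines the "current" service config versions in a single pass instead of a separate query: per service it keeps one candidate response, replaces it when a default-group version has a newer create time or when a still-existing config-group version has a higher version number, and finally flags only the surviving candidates with setIsCurrent(true). A compact, simplified sketch of that selection rule (plain value objects stand in for ServiceConfigVersionResponse, and the check that the config group still exists is omitted):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class CurrentConfigSelectionSketch {
        // Stand-in for ServiceConfigVersionResponse with the fields the comparison uses.
        static class Version {
            final String service;
            final long version;
            final long createTime;
            final boolean defaultGroup;   // true when the entity has no config group id
            boolean current;
            Version(String service, long version, long createTime, boolean defaultGroup) {
                this.service = service;
                this.version = version;
                this.createTime = createTime;
                this.defaultGroup = defaultGroup;
            }
        }

        public static void main(String[] args) {
            List<Version> all = new ArrayList<>();
            all.add(new Version("HDFS", 1, 100L, true));
            all.add(new Version("HDFS", 2, 200L, true));    // later default-group version
            all.add(new Version("HDFS", 3, 300L, false));   // config-group version created last

            Map<String, Version> active = new HashMap<>();
            for (Version v : all) {
                Version best = active.get(v.service);
                if (best == null) {
                    active.put(v.service, v);
                    continue;
                }
                if (v.defaultGroup ? v.createTime > best.createTime : v.version > best.version) {
                    active.put(v.service, v);
                }
            }
            for (Version v : active.values()) {
                v.current = true;   // only the winners are flagged, mirroring setIsCurrent(true)
            }
            for (Version v : all) {
                System.out.println(v.service + " v" + v.version + " current=" + v.current);
            }
        }
    }
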
http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/java/org/apache/ambari/server/state/services/AlertNoticeDispatchService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/services/AlertNoticeDispatchService.java b/ambari-server/src/main/java/org/apache/ambari/server/state/services/AlertNoticeDispatchService.java
index a27bc1d..0b84568 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/services/AlertNoticeDispatchService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/services/AlertNoticeDispatchService.java
@@ -451,7 +451,7 @@ public class AlertNoticeDispatchService extends AbstractScheduledService {
     String targetType = target.getNotificationType();
 
     // build the velocity objects for template rendering
-    AmbariInfo ambari = new AmbariInfo(m_metaInfo.get());
+    AmbariInfo ambari = new AmbariInfo(m_metaInfo.get(), m_configuration);
     AlertSummaryInfo summary = new AlertSummaryInfo(histories);
     DispatchInfo dispatch = new DispatchInfo(target);
 
@@ -516,7 +516,7 @@ public class AlertNoticeDispatchService extends AbstractScheduledService {
     String targetType = target.getNotificationType();
 
     // build the velocity objects for template rendering
-    AmbariInfo ambari = new AmbariInfo(m_metaInfo.get());
+    AmbariInfo ambari = new AmbariInfo(m_metaInfo.get(), m_configuration);
     AlertInfo alert = new AlertInfo(history);
     DispatchInfo dispatch = new DispatchInfo(target);
 
@@ -558,6 +558,10 @@ public class AlertNoticeDispatchService extends AbstractScheduledService {
         bodyWriter.write(alert.getAlertName());
         bodyWriter.write(" ");
         bodyWriter.write(alert.getAlertText());
+        if (alert.hasHostName()) {
+          bodyWriter.write(" ");
+          bodyWriter.append(alert.getHostName());
+        }
         bodyWriter.write("\n");
       }
     }
@@ -1042,7 +1046,8 @@ public class AlertNoticeDispatchService extends AbstractScheduledService {
      *
      * @param metaInfo
      */
-    protected AmbariInfo(AmbariMetaInfo metaInfo) {
+    protected AmbariInfo(AmbariMetaInfo metaInfo, Configuration m_configuration) {
+      m_url = m_configuration.getAmbariDisplayUrl();
       m_version = metaInfo.getServerVersion();
     }
 
@@ -1053,6 +1058,10 @@ public class AlertNoticeDispatchService extends AbstractScheduledService {
       return m_hostName;
     }
 
+    public boolean hasUrl() {
+      return m_url != null;
+    }
+
     /**
      * @return the url
      */
@@ -1193,4 +1202,4 @@ public class AlertNoticeDispatchService extends AbstractScheduledService {
       return m_body;
     }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
index 88b3151..0aa1e7a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
@@ -115,6 +115,7 @@ public class UpgradeCatalog222 extends AbstractUpgradeCatalog {
     updateAlerts();
     updateStormConfigs();
     updateAMSConfigs();
+    updateHostRoleCommands();
   }
 
   protected void updateStormConfigs() throws  AmbariException {
@@ -153,6 +154,10 @@ public class UpgradeCatalog222 extends AbstractUpgradeCatalog {
 
   }
 
+  protected void updateHostRoleCommands() throws SQLException {
+    dbAccessor.createIndex("idx_hrc_status", "host_role_command", "status", "role");
+  }
+
   protected void updateAMSConfigs() throws AmbariException {
     AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
     Clusters clusters = ambariManagementController.getClusters();

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
index 09f31e4..4e99c89 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
@@ -32,6 +32,7 @@ import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
 import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
 import org.apache.ambari.server.orm.dao.DaoUtils;
@@ -89,6 +90,8 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   private static final String ID = "id";
   private static final String SETTING_TABLE = "setting";
 
+  protected static final String SERVICE_COMPONENT_DESIRED_STATE_TABLE = "servicecomponentdesiredstate";
+  protected static final String RECOVERY_ENABLED_COL = "recovery_enabled";
 
   // ----- Constructors ------------------------------------------------------
 
@@ -127,6 +130,7 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   @Override
   protected void executeDDLUpdates() throws AmbariException, SQLException {
     updateAdminPermissionTable();
+    updateServiceComponentDesiredStateTable();
     createSettingTable();
     updateRepoVersionTableDDL();
     updateServiceComponentDesiredStateTableDDL();
@@ -562,4 +566,14 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
 
     addSequence("servicecomponent_history_id_seq", 0L, false);
   }
+  /**
+   * Alter servicecomponentdesiredstate table to add recovery_enabled column.
+   * @throws SQLException
+   */
+  private void updateServiceComponentDesiredStateTable() throws SQLException {
+    // ALTER TABLE servicecomponentdesiredstate ADD COLUMN
+    // recovery_enabled SMALLINT DEFAULT 0 NOT NULL
+    dbAccessor.addColumn(SERVICE_COMPONENT_DESIRED_STATE_TABLE,
+            new DBAccessor.DBColumnInfo(RECOVERY_ENABLED_COL, Short.class, null, 0, false));
+  }
 }
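
updateServiceComponentDesiredStateTable() goes through DBAccessor.addColumn with a DBColumnInfo(name, type, length, defaultValue, nullable), and the inline comment spells out the DDL it is expected to issue. The sketch below shows one way such column metadata could be rendered into that statement; the Java-type to SQL-type mapping is an illustrative assumption, not Ambari's actual per-dialect handling.

public class AddColumnSketch {

  /** Minimal stand-in for DBAccessor.DBColumnInfo(name, type, length, defaultValue, nullable). */
  static class ColumnInfo {
    final String name;
    final Class<?> type;
    final Integer length;
    final Object defaultValue;
    final boolean nullable;

    ColumnInfo(String name, Class<?> type, Integer length, Object defaultValue, boolean nullable) {
      this.name = name;
      this.type = type;
      this.length = length;
      this.defaultValue = defaultValue;
      this.nullable = nullable;
    }
  }

  // Assumed Java-type -> SQL-type mapping; the real accessor translates per dialect.
  static String sqlType(ColumnInfo c) {
    if (c.type == Short.class)  { return "SMALLINT"; }
    if (c.type == Long.class)   { return "BIGINT"; }
    if (c.type == String.class) { return "VARCHAR(" + c.length + ")"; }
    throw new IllegalArgumentException("unmapped type: " + c.type);
  }

  static String addColumnSql(String table, ColumnInfo c) {
    StringBuilder sql = new StringBuilder("ALTER TABLE ").append(table)
        .append(" ADD COLUMN ").append(c.name).append(' ').append(sqlType(c));
    if (c.defaultValue != null) {
      sql.append(" DEFAULT ").append(c.defaultValue);
    }
    if (!c.nullable) {
      sql.append(" NOT NULL");
    }
    return sql.toString();
  }

  public static void main(String[] args) {
    // Mirrors new DBColumnInfo("recovery_enabled", Short.class, null, 0, false) from the hunk above.
    ColumnInfo recoveryEnabled = new ColumnInfo("recovery_enabled", Short.class, null, 0, false);

    // Prints: ALTER TABLE servicecomponentdesiredstate ADD COLUMN recovery_enabled SMALLINT DEFAULT 0 NOT NULL
    System.out.println(addColumnSql("servicecomponentdesiredstate", recoveryEnabled));
  }
}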

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
index 2db745b..73cf84e 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
@@ -177,8 +177,10 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
+  recovery_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
+  PRIMARY KEY (component_name, cluster_id, service_name)
 );
 
 CREATE TABLE servicedesiredstate (

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index b892bc8..9353ac2 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -178,8 +178,10 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(100) NOT NULL,
+  recovery_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
+  PRIMARY KEY (component_name, cluster_id, service_name)
 );
 
 CREATE TABLE servicedesiredstate (
@@ -683,6 +685,7 @@ CREATE TABLE setting (
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
+CREATE INDEX idx_hrc_status_role ON host_role_command (status, role);
 CREATE INDEX idx_rsc_request_id ON role_success_criteria (request_id);
 
 -- altering tables by creating unique constraints----------

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 026efea..5f39b44 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -168,8 +168,10 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id NUMBER(19) NOT NULL,
   desired_state VARCHAR2(255) NOT NULL,
   service_name VARCHAR2(255) NOT NULL,
+  recovery_enabled SMALLINT DEFAULT 0 NOT NULL,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
+  PRIMARY KEY (component_name, cluster_id, service_name)
 );
 
 CREATE TABLE servicedesiredstate (
@@ -672,6 +674,7 @@ CREATE TABLE setting (
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
+CREATE INDEX idx_hrc_status_role ON host_role_command (status, role);
 CREATE INDEX idx_rsc_request_id ON role_success_criteria (request_id);
 
 --------altering tables by creating unique constraints----------

http://git-wip-us.apache.org/repos/asf/ambari/blob/dea22be1/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index fb9889d..4a8fa2a 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -177,8 +177,10 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
+  recovery_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
+  PRIMARY KEY (component_name, cluster_id, service_name)
 );
 
 CREATE TABLE servicedesiredstate (
@@ -676,8 +678,11 @@ CREATE TABLE setting (
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
+CREATE INDEX idx_hrc_status_role ON host_role_command (status, role);
 CREATE INDEX idx_rsc_request_id ON role_success_criteria (request_id);
 
+
+
 --------altering tables by creating unique constraints----------
 ALTER TABLE users ADD CONSTRAINT UNQ_users_0 UNIQUE (user_name, user_type);
 ALTER TABLE clusterconfig ADD CONSTRAINT UQ_config_type_tag UNIQUE (cluster_id, type_name, version_tag);


[45/50] [abbrv] ambari git commit: AMBARI-14996. Component should support a desired version (dlysnichenko)

Posted by nc...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
index bd7755c..9c61cbc 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
@@ -27,6 +27,7 @@ CREATE TABLE stack(
 CREATE TABLE clusters (
   cluster_id BIGINT NOT NULL,
   resource_id BIGINT NOT NULL,
+  upgrade_id BIGINT,
   cluster_info VARCHAR(255) NOT NULL,
   cluster_name VARCHAR(100) NOT NULL UNIQUE,
   provisioning_state VARCHAR(255) NOT NULL DEFAULT 'INIT',
@@ -176,6 +177,7 @@ CREATE TABLE servicecomponentdesiredstate (
   cluster_id BIGINT NOT NULL,
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
+  desired_version VARCHAR(255) NOT NULL DEFAULT 'UNKNOWN',
   service_name VARCHAR(255) NOT NULL,
   recovery_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
@@ -673,6 +675,60 @@ CREATE TABLE setting (
   PRIMARY KEY (id)
 );
 
+
+-- upgrade tables
+CREATE TABLE upgrade (
+  upgrade_id BIGINT NOT NULL,
+  cluster_id BIGINT NOT NULL,
+  request_id BIGINT NOT NULL,
+  from_version VARCHAR(255) DEFAULT '' NOT NULL,
+  to_version VARCHAR(255) DEFAULT '' NOT NULL,
+  direction VARCHAR(255) DEFAULT 'UPGRADE' NOT NULL,
+  upgrade_package VARCHAR(255) NOT NULL,
+  upgrade_type VARCHAR(32) NOT NULL,
+  skip_failures SMALLINT DEFAULT 0 NOT NULL,
+  skip_sc_failures SMALLINT DEFAULT 0 NOT NULL,
+  downgrade_allowed SMALLINT DEFAULT 1 NOT NULL,
+  PRIMARY KEY (upgrade_id),
+  FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id),
+  FOREIGN KEY (request_id) REFERENCES request(request_id)
+);
+
+CREATE TABLE upgrade_group (
+  upgrade_group_id BIGINT NOT NULL,
+  upgrade_id BIGINT NOT NULL,
+  group_name VARCHAR(255) DEFAULT '' NOT NULL,
+  group_title VARCHAR(1024) DEFAULT '' NOT NULL,
+  PRIMARY KEY (upgrade_group_id),
+  FOREIGN KEY (upgrade_id) REFERENCES upgrade(upgrade_id)
+);
+
+CREATE TABLE upgrade_item (
+  upgrade_item_id BIGINT NOT NULL,
+  upgrade_group_id BIGINT NOT NULL,
+  stage_id BIGINT NOT NULL,
+  state VARCHAR(255) DEFAULT 'NONE' NOT NULL,
+  hosts VARCHAR(3000),
+  tasks VARCHAR(3000),
+  item_text VARCHAR(1024),
+  PRIMARY KEY (upgrade_item_id),
+  FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
+);
+
+CREATE TABLE servicecomponent_history(
+  id BIGINT NOT NULL,
+  component_id BIGINT NOT NULL,
+  upgrade_id BIGINT NOT NULL,
+  from_stack_id BIGINT NOT NULL,
+  to_stack_id BIGINT NOT NULL,
+  CONSTRAINT PK_sc_history PRIMARY KEY (id),
+  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id),
+  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id),
+  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id),
+  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)
+);
+
+
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
@@ -695,6 +751,7 @@ ALTER TABLE stack ADD CONSTRAINT unq_stack UNIQUE (stack_name, stack_version);
 -- Note, Oracle has a limitation of 32 chars in the FK name, and we should use the same FK name in all DB types.
 ALTER TABLE members ADD CONSTRAINT FK_members_group_id FOREIGN KEY (group_id) REFERENCES groups (group_id);
 ALTER TABLE members ADD CONSTRAINT FK_members_user_id FOREIGN KEY (user_id) REFERENCES users (user_id);
+ALTER TABLE clusters ADD CONSTRAINT FK_clusters_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id);
 ALTER TABLE clusterconfig ADD CONSTRAINT FK_clusterconfig_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE clusterservices ADD CONSTRAINT FK_clusterservices_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE clusterconfigmapping ADD CONSTRAINT clusterconfigmappingcluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
@@ -914,58 +971,6 @@ CREATE INDEX idx_alert_history_state on alert_history(alert_state);
 CREATE INDEX idx_alert_group_name on alert_group(group_name);
 CREATE INDEX idx_alert_notice_state on alert_notice(notify_state);
 
--- upgrade tables
-CREATE TABLE upgrade (
-  upgrade_id BIGINT NOT NULL,
-  cluster_id BIGINT NOT NULL,
-  request_id BIGINT NOT NULL,
-  from_version VARCHAR(255) DEFAULT '' NOT NULL,
-  to_version VARCHAR(255) DEFAULT '' NOT NULL,
-  direction VARCHAR(255) DEFAULT 'UPGRADE' NOT NULL,
-  upgrade_package VARCHAR(255) NOT NULL,
-  upgrade_type VARCHAR(32) NOT NULL,
-  skip_failures SMALLINT DEFAULT 0 NOT NULL,
-  skip_sc_failures SMALLINT DEFAULT 0 NOT NULL,
-  downgrade_allowed SMALLINT DEFAULT 1 NOT NULL,
-  PRIMARY KEY (upgrade_id),
-  FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id),
-  FOREIGN KEY (request_id) REFERENCES request(request_id)
-);
-
-CREATE TABLE upgrade_group (
-  upgrade_group_id BIGINT NOT NULL,
-  upgrade_id BIGINT NOT NULL,
-  group_name VARCHAR(255) DEFAULT '' NOT NULL,
-  group_title VARCHAR(1024) DEFAULT '' NOT NULL,
-  PRIMARY KEY (upgrade_group_id),
-  FOREIGN KEY (upgrade_id) REFERENCES upgrade(upgrade_id)
-);
-
-CREATE TABLE upgrade_item (
-  upgrade_item_id BIGINT NOT NULL,
-  upgrade_group_id BIGINT NOT NULL,
-  stage_id BIGINT NOT NULL,
-  state VARCHAR(255) DEFAULT 'NONE' NOT NULL,
-  hosts VARCHAR(3000),
-  tasks VARCHAR(3000),
-  item_text VARCHAR(1024),
-  PRIMARY KEY (upgrade_item_id),
-  FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
-);
-
-CREATE TABLE servicecomponent_history(
-  id BIGINT NOT NULL,
-  component_id BIGINT NOT NULL,
-  upgrade_id BIGINT NOT NULL,
-  from_stack_id BIGINT NOT NULL,
-  to_stack_id BIGINT NOT NULL,
-  CONSTRAINT PK_sc_history PRIMARY KEY (id),
-  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id),
-  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id),
-  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id),
-  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)
-);
-
 ---------inserting some data-----------
 -- In order for the first ID to be 1, must initialize the ambari_sequences table with a sequence_value of 0.
 -- BEGIN;
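
Taken together, the relocated upgrade, upgrade_group and upgrade_item tables and the new servicecomponent_history table let a component's stack movement be traced back to the upgrade request that caused it, through foreign keys into servicecomponentdesiredstate, upgrade and stack. Below is a hedged JDBC sketch of the kind of join the schema supports; the query text is illustrative only, since the server reads these tables through its JPA entities rather than hand-written SQL.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class ComponentHistoryQuerySketch {

  // Lists, for one cluster, which components moved between stacks during which upgrade.
  static void printHistory(Connection conn, long clusterId) throws SQLException {
    String sql =
        "SELECT scds.component_name, scds.desired_version, "
      + "       u.direction, u.from_version, u.to_version, "
      + "       fs.stack_version AS from_stack, ts.stack_version AS to_stack "
      + "FROM servicecomponent_history h "
      + "JOIN servicecomponentdesiredstate scds ON scds.id = h.component_id "
      + "JOIN upgrade u ON u.upgrade_id = h.upgrade_id "
      + "JOIN stack fs ON fs.stack_id = h.from_stack_id "
      + "JOIN stack ts ON ts.stack_id = h.to_stack_id "
      + "WHERE u.cluster_id = ?";

    try (PreparedStatement ps = conn.prepareStatement(sql)) {
      ps.setLong(1, clusterId);
      try (ResultSet rs = ps.executeQuery()) {
        while (rs.next()) {
          System.out.printf("%s %s: %s %s -> %s (stack %s -> %s)%n",
              rs.getString("component_name"), rs.getString("desired_version"),
              rs.getString("direction"), rs.getString("from_version"),
              rs.getString("to_version"), rs.getString("from_stack"),
              rs.getString("to_stack"));
        }
      }
    }
  }
}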

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index ac1c5d7..0ebfa40 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -37,6 +37,7 @@ CREATE TABLE stack(
 CREATE TABLE clusters (
   cluster_id BIGINT NOT NULL,
   resource_id BIGINT NOT NULL,
+  upgrade_id BIGINT,
   cluster_info VARCHAR(255) NOT NULL,
   cluster_name VARCHAR(100) NOT NULL UNIQUE,
   provisioning_state VARCHAR(255) NOT NULL DEFAULT 'INIT',
@@ -176,6 +177,7 @@ CREATE TABLE servicecomponentdesiredstate (
   component_name VARCHAR(100) NOT NULL,
   cluster_id BIGINT NOT NULL,
   desired_stack_id BIGINT NOT NULL,
+  desired_version VARCHAR(255) NOT NULL DEFAULT 'UNKNOWN',
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(100) NOT NULL,
   recovery_enabled SMALLINT NOT NULL DEFAULT 0,
@@ -681,6 +683,59 @@ CREATE TABLE setting (
   PRIMARY KEY (id)
 );
 
+
+-- upgrade tables
+CREATE TABLE upgrade (
+  upgrade_id BIGINT NOT NULL,
+  cluster_id BIGINT NOT NULL,
+  request_id BIGINT NOT NULL,
+  from_version VARCHAR(255) DEFAULT '' NOT NULL,
+  to_version VARCHAR(255) DEFAULT '' NOT NULL,
+  direction VARCHAR(255) DEFAULT 'UPGRADE' NOT NULL,
+  upgrade_package VARCHAR(255) NOT NULL,
+  upgrade_type VARCHAR(32) NOT NULL,
+  skip_failures TINYINT(1) NOT NULL DEFAULT 0,
+  skip_sc_failures TINYINT(1) NOT NULL DEFAULT 0,
+  downgrade_allowed TINYINT(1) NOT NULL DEFAULT 1,
+  PRIMARY KEY (upgrade_id),
+  FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id),
+  FOREIGN KEY (request_id) REFERENCES request(request_id)
+);
+
+CREATE TABLE upgrade_group (
+  upgrade_group_id BIGINT NOT NULL,
+  upgrade_id BIGINT NOT NULL,
+  group_name VARCHAR(255) DEFAULT '' NOT NULL,
+  group_title VARCHAR(1024) DEFAULT '' NOT NULL,
+  PRIMARY KEY (upgrade_group_id),
+  FOREIGN KEY (upgrade_id) REFERENCES upgrade(upgrade_id)
+);
+
+CREATE TABLE upgrade_item (
+  upgrade_item_id BIGINT NOT NULL,
+  upgrade_group_id BIGINT NOT NULL,
+  stage_id BIGINT NOT NULL,
+  state VARCHAR(255) DEFAULT 'NONE' NOT NULL,
+  hosts TEXT,
+  tasks TEXT,
+  item_text VARCHAR(1024),
+  PRIMARY KEY (upgrade_item_id),
+  FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
+);
+
+CREATE TABLE servicecomponent_history(
+  id BIGINT NOT NULL,
+  component_id BIGINT NOT NULL,
+  upgrade_id BIGINT NOT NULL,
+  from_stack_id BIGINT NOT NULL,
+  to_stack_id BIGINT NOT NULL,
+  CONSTRAINT PK_sc_history PRIMARY KEY (id),
+  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id),
+  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id),
+  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id),
+  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)
+);
+
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
@@ -706,6 +761,7 @@ ALTER TABLE stack ADD CONSTRAINT unq_stack UNIQUE (stack_name, stack_version);
 -- Note, Oracle has a limitation of 32 chars in the FK name, and we should use the same FK name in all DB types.
 ALTER TABLE members ADD CONSTRAINT FK_members_group_id FOREIGN KEY (group_id) REFERENCES groups (group_id);
 ALTER TABLE members ADD CONSTRAINT FK_members_user_id FOREIGN KEY (user_id) REFERENCES users (user_id);
+ALTER TABLE clusters ADD CONSTRAINT FK_clusters_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id);
 ALTER TABLE clusterconfig ADD CONSTRAINT FK_clusterconfig_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE clusterservices ADD CONSTRAINT FK_clusterservices_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE clusterconfigmapping ADD CONSTRAINT clusterconfigmappingcluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
@@ -925,58 +981,6 @@ CREATE INDEX idx_alert_history_state on alert_history(alert_state);
 CREATE INDEX idx_alert_group_name on alert_group(group_name);
 CREATE INDEX idx_alert_notice_state on alert_notice(notify_state);
 
--- upgrade tables
-CREATE TABLE upgrade (
-  upgrade_id BIGINT NOT NULL,
-  cluster_id BIGINT NOT NULL,
-  request_id BIGINT NOT NULL,
-  from_version VARCHAR(255) DEFAULT '' NOT NULL,
-  to_version VARCHAR(255) DEFAULT '' NOT NULL,
-  direction VARCHAR(255) DEFAULT 'UPGRADE' NOT NULL,
-  upgrade_package VARCHAR(255) NOT NULL,
-  upgrade_type VARCHAR(32) NOT NULL,
-  skip_failures TINYINT(1) NOT NULL DEFAULT 0,
-  skip_sc_failures TINYINT(1) NOT NULL DEFAULT 0,
-  downgrade_allowed TINYINT(1) NOT NULL DEFAULT 1,
-  PRIMARY KEY (upgrade_id),
-  FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id),
-  FOREIGN KEY (request_id) REFERENCES request(request_id)
-);
-
-CREATE TABLE upgrade_group (
-  upgrade_group_id BIGINT NOT NULL,
-  upgrade_id BIGINT NOT NULL,
-  group_name VARCHAR(255) DEFAULT '' NOT NULL,
-  group_title VARCHAR(1024) DEFAULT '' NOT NULL,
-  PRIMARY KEY (upgrade_group_id),
-  FOREIGN KEY (upgrade_id) REFERENCES upgrade(upgrade_id)
-);
-
-CREATE TABLE upgrade_item (
-  upgrade_item_id BIGINT NOT NULL,
-  upgrade_group_id BIGINT NOT NULL,
-  stage_id BIGINT NOT NULL,
-  state VARCHAR(255) DEFAULT 'NONE' NOT NULL,
-  hosts TEXT,
-  tasks TEXT,
-  item_text VARCHAR(1024),
-  PRIMARY KEY (upgrade_item_id),
-  FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
-);
-
-CREATE TABLE servicecomponent_history(
-  id BIGINT NOT NULL,
-  component_id BIGINT NOT NULL,
-  upgrade_id BIGINT NOT NULL,
-  from_stack_id BIGINT NOT NULL,
-  to_stack_id BIGINT NOT NULL,
-  CONSTRAINT PK_sc_history PRIMARY KEY (id),
-  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id),
-  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id),
-  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id),
-  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)
-);
-
 -- In order for the first ID to be 1, must initialize the ambari_sequences table with a sequence_value of 0.
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('cluster_id_seq', 1);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('host_id_seq', 0);
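
The new clusters.upgrade_id column and the FK_clusters_upgrade_id constraint give each cluster a pointer to the upgrade it is currently executing (NULL when none is in progress). A small illustrative lookup over that relationship follows; raw JDBC is used here only to show the join the DDL implies, not how Ambari actually reads it.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class ActiveUpgradeLookupSketch {

  // Returns a one-line description of the upgrade a cluster currently points at,
  // or null when clusters.upgrade_id is NULL or the cluster is unknown.
  static String activeUpgrade(Connection conn, String clusterName) throws SQLException {
    String sql =
        "SELECT u.direction, u.from_version, u.to_version, u.upgrade_type "
      + "FROM clusters c JOIN upgrade u ON u.upgrade_id = c.upgrade_id "
      + "WHERE c.cluster_name = ?";

    try (PreparedStatement ps = conn.prepareStatement(sql)) {
      ps.setString(1, clusterName);
      try (ResultSet rs = ps.executeQuery()) {
        if (!rs.next()) {
          return null;
        }
        return rs.getString("direction") + " " + rs.getString("from_version")
            + " -> " + rs.getString("to_version")
            + " (" + rs.getString("upgrade_type") + ")";
      }
    }
  }
}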

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 4ed3a19..a8cbda3 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -27,6 +27,7 @@ CREATE TABLE stack(
 CREATE TABLE clusters (
   cluster_id NUMBER(19) NOT NULL,
   resource_id NUMBER(19) NOT NULL,
+  upgrade_id NUMBER(19),
   cluster_info VARCHAR2(255) NULL,
   cluster_name VARCHAR2(100) NOT NULL UNIQUE,
   provisioning_state VARCHAR2(255) DEFAULT 'INIT' NOT NULL,
@@ -167,6 +168,7 @@ CREATE TABLE servicecomponentdesiredstate (
   cluster_id NUMBER(19) NOT NULL,
   desired_stack_id NUMBER(19) NOT NULL,
   desired_state VARCHAR2(255) NOT NULL,
+  desired_version VARCHAR(255) DEFAULT 'UNKNOWN' NOT NULL,
   service_name VARCHAR2(255) NOT NULL,
   recovery_enabled SMALLINT DEFAULT 0 NOT NULL,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
@@ -670,6 +672,60 @@ CREATE TABLE setting (
   PRIMARY KEY (id)
 );
 
+
+-- upgrade tables
+CREATE TABLE upgrade (
+  upgrade_id NUMBER(19) NOT NULL,
+  cluster_id NUMBER(19) NOT NULL,
+  request_id NUMBER(19) NOT NULL,
+  from_version VARCHAR2(255) DEFAULT '' NOT NULL,
+  to_version VARCHAR2(255) DEFAULT '' NOT NULL,
+  direction VARCHAR2(255) DEFAULT 'UPGRADE' NOT NULL,
+  upgrade_package VARCHAR2(255) NOT NULL,
+  upgrade_type VARCHAR2(32) NOT NULL,
+  skip_failures NUMBER(1) DEFAULT 0 NOT NULL,
+  skip_sc_failures NUMBER(1) DEFAULT 0 NOT NULL,
+  downgrade_allowed NUMBER(1) DEFAULT 1 NOT NULL,
+  PRIMARY KEY (upgrade_id),
+  FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id),
+  FOREIGN KEY (request_id) REFERENCES request(request_id)
+);
+
+CREATE TABLE upgrade_group (
+  upgrade_group_id NUMBER(19) NOT NULL,
+  upgrade_id NUMBER(19) NOT NULL,
+  group_name VARCHAR2(255) DEFAULT '' NOT NULL,
+  group_title VARCHAR2(1024) DEFAULT '' NOT NULL,
+  PRIMARY KEY (upgrade_group_id),
+  FOREIGN KEY (upgrade_id) REFERENCES upgrade(upgrade_id)
+);
+
+CREATE TABLE upgrade_item (
+  upgrade_item_id NUMBER(19) NOT NULL,
+  upgrade_group_id NUMBER(19) NOT NULL,
+  stage_id NUMBER(19) NOT NULL,
+  state VARCHAR2(255) DEFAULT 'NONE' NOT NULL,
+  hosts CLOB,
+  tasks CLOB,
+  item_text VARCHAR2(1024),
+  PRIMARY KEY (upgrade_item_id),
+  FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
+);
+
+CREATE TABLE servicecomponent_history(
+  id NUMBER(19) NOT NULL,
+  component_id NUMBER(19) NOT NULL,
+  upgrade_id NUMBER(19) NOT NULL,
+  from_stack_id NUMBER(19) NOT NULL,
+  to_stack_id NUMBER(19) NOT NULL,
+  CONSTRAINT PK_sc_history PRIMARY KEY (id),
+  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id),
+  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id),
+  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id),
+  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)
+);
+
+
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
@@ -695,6 +751,7 @@ ALTER TABLE stack ADD CONSTRAINT unq_stack UNIQUE (stack_name, stack_version);
 -- Note, Oracle has a limitation of 32 chars in the FK name, and we should use the same FK name in all DB types.
 ALTER TABLE members ADD CONSTRAINT FK_members_group_id FOREIGN KEY (group_id) REFERENCES groups (group_id);
 ALTER TABLE members ADD CONSTRAINT FK_members_user_id FOREIGN KEY (user_id) REFERENCES users (user_id);
+ALTER TABLE clusters ADD CONSTRAINT FK_clusters_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id);
 ALTER TABLE clusterconfig ADD CONSTRAINT FK_clusterconfig_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE serviceconfighosts ADD CONSTRAINT FK_scvhosts_scv FOREIGN KEY (service_config_id) REFERENCES serviceconfig(service_config_id);
 ALTER TABLE serviceconfighosts ADD CONSTRAINT FK_scvhosts_host_id FOREIGN KEY (host_id) REFERENCES hosts(host_id);
@@ -914,58 +971,6 @@ CREATE INDEX idx_alert_history_state on alert_history(alert_state);
 CREATE INDEX idx_alert_group_name on alert_group(group_name);
 CREATE INDEX idx_alert_notice_state on alert_notice(notify_state);
 
--- upgrade tables
-CREATE TABLE upgrade (
-  upgrade_id NUMBER(19) NOT NULL,
-  cluster_id NUMBER(19) NOT NULL,
-  request_id NUMBER(19) NOT NULL,
-  from_version VARCHAR2(255) DEFAULT '' NOT NULL,
-  to_version VARCHAR2(255) DEFAULT '' NOT NULL,
-  direction VARCHAR2(255) DEFAULT 'UPGRADE' NOT NULL,
-  upgrade_package VARCHAR2(255) NOT NULL,
-  upgrade_type VARCHAR2(32) NOT NULL,
-  skip_failures NUMBER(1) DEFAULT 0 NOT NULL,
-  skip_sc_failures NUMBER(1) DEFAULT 0 NOT NULL,
-  downgrade_allowed NUMBER(1) DEFAULT 1 NOT NULL,
-  PRIMARY KEY (upgrade_id),
-  FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id),
-  FOREIGN KEY (request_id) REFERENCES request(request_id)
-);
-
-CREATE TABLE upgrade_group (
-  upgrade_group_id NUMBER(19) NOT NULL,
-  upgrade_id NUMBER(19) NOT NULL,
-  group_name VARCHAR2(255) DEFAULT '' NOT NULL,
-  group_title VARCHAR2(1024) DEFAULT '' NOT NULL,
-  PRIMARY KEY (upgrade_group_id),
-  FOREIGN KEY (upgrade_id) REFERENCES upgrade(upgrade_id)
-);
-
-CREATE TABLE upgrade_item (
-  upgrade_item_id NUMBER(19) NOT NULL,
-  upgrade_group_id NUMBER(19) NOT NULL,
-  stage_id NUMBER(19) NOT NULL,
-  state VARCHAR2(255) DEFAULT 'NONE' NOT NULL,
-  hosts CLOB,
-  tasks CLOB,
-  item_text VARCHAR2(1024),
-  PRIMARY KEY (upgrade_item_id),
-  FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
-);
-
-CREATE TABLE servicecomponent_history(
-  id NUMBER(19) NOT NULL,
-  component_id NUMBER(19) NOT NULL,
-  upgrade_id NUMBER(19) NOT NULL,
-  from_stack_id NUMBER(19) NOT NULL,
-  to_stack_id NUMBER(19) NOT NULL,
-  CONSTRAINT PK_sc_history PRIMARY KEY (id),
-  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id),
-  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id),
-  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id),
-  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)
-);
-
 ---------inserting some data-----------
 -- In order for the first ID to be 1, must initialize the ambari_sequences table with a sequence_value of 0.
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('host_role_command_id_seq', 0);

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index 5d7be25..bb47a8a 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -27,6 +27,7 @@ CREATE TABLE stack(
 CREATE TABLE clusters (
   cluster_id BIGINT NOT NULL,
   resource_id BIGINT NOT NULL,
+  upgrade_id BIGINT,
   cluster_info VARCHAR(255) NOT NULL,
   cluster_name VARCHAR(100) NOT NULL UNIQUE,
   provisioning_state VARCHAR(255) NOT NULL DEFAULT 'INIT',
@@ -175,6 +176,7 @@ CREATE TABLE servicecomponentdesiredstate (
   component_name VARCHAR(255) NOT NULL,
   cluster_id BIGINT NOT NULL,
   desired_stack_id BIGINT NOT NULL,
+  desired_version VARCHAR(255) NOT NULL DEFAULT 'UNKNOWN',
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
   recovery_enabled SMALLINT NOT NULL DEFAULT 0,
@@ -674,6 +676,60 @@ CREATE TABLE setting (
   PRIMARY KEY (id)
 );
 
+
+-- upgrade tables
+CREATE TABLE upgrade (
+  upgrade_id BIGINT NOT NULL,
+  cluster_id BIGINT NOT NULL,
+  request_id BIGINT NOT NULL,
+  from_version VARCHAR(255) DEFAULT '' NOT NULL,
+  to_version VARCHAR(255) DEFAULT '' NOT NULL,
+  direction VARCHAR(255) DEFAULT 'UPGRADE' NOT NULL,
+  upgrade_package VARCHAR(255) NOT NULL,
+  upgrade_type VARCHAR(32) NOT NULL,
+  skip_failures SMALLINT DEFAULT 0 NOT NULL,
+  skip_sc_failures SMALLINT DEFAULT 0 NOT NULL,
+  downgrade_allowed SMALLINT DEFAULT 1 NOT NULL,
+  PRIMARY KEY (upgrade_id),
+  FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id),
+  FOREIGN KEY (request_id) REFERENCES request(request_id)
+);
+
+CREATE TABLE upgrade_group (
+  upgrade_group_id BIGINT NOT NULL,
+  upgrade_id BIGINT NOT NULL,
+  group_name VARCHAR(255) DEFAULT '' NOT NULL,
+  group_title VARCHAR(1024) DEFAULT '' NOT NULL,
+  PRIMARY KEY (upgrade_group_id),
+  FOREIGN KEY (upgrade_id) REFERENCES upgrade(upgrade_id)
+);
+
+CREATE TABLE upgrade_item (
+  upgrade_item_id BIGINT NOT NULL,
+  upgrade_group_id BIGINT NOT NULL,
+  stage_id BIGINT NOT NULL,
+  state VARCHAR(255) DEFAULT 'NONE' NOT NULL,
+  hosts TEXT,
+  tasks TEXT,
+  item_text VARCHAR(1024),
+  PRIMARY KEY (upgrade_item_id),
+  FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
+);
+
+CREATE TABLE servicecomponent_history(
+  id BIGINT NOT NULL,
+  component_id BIGINT NOT NULL,
+  upgrade_id BIGINT NOT NULL,
+  from_stack_id BIGINT NOT NULL,
+  to_stack_id BIGINT NOT NULL,
+  CONSTRAINT PK_sc_history PRIMARY KEY (id),
+  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id),
+  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id),
+  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id),
+  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)
+);
+
+
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
@@ -699,6 +755,7 @@ ALTER TABLE stack ADD CONSTRAINT unq_stack UNIQUE (stack_name, stack_version);
 -- Note, Oracle has a limitation of 32 chars in the FK name, and we should use the same FK name in all DB types.
 ALTER TABLE members ADD CONSTRAINT FK_members_group_id FOREIGN KEY (group_id) REFERENCES groups (group_id);
 ALTER TABLE members ADD CONSTRAINT FK_members_user_id FOREIGN KEY (user_id) REFERENCES users (user_id);
+ALTER TABLE clusters ADD CONSTRAINT FK_clusters_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id);
 ALTER TABLE clusterconfig ADD CONSTRAINT FK_clusterconfig_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE clusterservices ADD CONSTRAINT FK_clusterservices_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE clusterconfigmapping ADD CONSTRAINT clusterconfigmappingcluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
@@ -918,58 +975,6 @@ CREATE INDEX idx_alert_history_state on alert_history(alert_state);
 CREATE INDEX idx_alert_group_name on alert_group(group_name);
 CREATE INDEX idx_alert_notice_state on alert_notice(notify_state);
 
--- upgrade tables
-CREATE TABLE upgrade (
-  upgrade_id BIGINT NOT NULL,
-  cluster_id BIGINT NOT NULL,
-  request_id BIGINT NOT NULL,
-  from_version VARCHAR(255) DEFAULT '' NOT NULL,
-  to_version VARCHAR(255) DEFAULT '' NOT NULL,
-  direction VARCHAR(255) DEFAULT 'UPGRADE' NOT NULL,
-  upgrade_package VARCHAR(255) NOT NULL,
-  upgrade_type VARCHAR(32) NOT NULL,
-  skip_failures SMALLINT DEFAULT 0 NOT NULL,
-  skip_sc_failures SMALLINT DEFAULT 0 NOT NULL,
-  downgrade_allowed SMALLINT DEFAULT 1 NOT NULL,
-  PRIMARY KEY (upgrade_id),
-  FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id),
-  FOREIGN KEY (request_id) REFERENCES request(request_id)
-);
-
-CREATE TABLE upgrade_group (
-  upgrade_group_id BIGINT NOT NULL,
-  upgrade_id BIGINT NOT NULL,
-  group_name VARCHAR(255) DEFAULT '' NOT NULL,
-  group_title VARCHAR(1024) DEFAULT '' NOT NULL,
-  PRIMARY KEY (upgrade_group_id),
-  FOREIGN KEY (upgrade_id) REFERENCES upgrade(upgrade_id)
-);
-
-CREATE TABLE upgrade_item (
-  upgrade_item_id BIGINT NOT NULL,
-  upgrade_group_id BIGINT NOT NULL,
-  stage_id BIGINT NOT NULL,
-  state VARCHAR(255) DEFAULT 'NONE' NOT NULL,
-  hosts TEXT,
-  tasks TEXT,
-  item_text VARCHAR(1024),
-  PRIMARY KEY (upgrade_item_id),
-  FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
-);
-
-CREATE TABLE servicecomponent_history(
-  id BIGINT NOT NULL,
-  component_id BIGINT NOT NULL,
-  upgrade_id BIGINT NOT NULL,
-  from_stack_id BIGINT NOT NULL,
-  to_stack_id BIGINT NOT NULL,
-  CONSTRAINT PK_sc_history PRIMARY KEY (id),
-  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id),
-  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id),
-  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id),
-  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)
-);
-
 ---------inserting some data-----------
 -- In order for the first ID to be 1, must initialize the ambari_sequences table with a sequence_value of 0.
 BEGIN;

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
index c032b8f..8ce2ba8 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
@@ -39,6 +39,7 @@ GRANT ALL PRIVILEGES ON TABLE ambari.stack TO :username;
 CREATE TABLE ambari.clusters (
   cluster_id BIGINT NOT NULL,
   resource_id BIGINT NOT NULL,
+  upgrade_id BIGINT,
   cluster_info VARCHAR(255) NOT NULL,
   cluster_name VARCHAR(100) NOT NULL UNIQUE,
   provisioning_state VARCHAR(255) NOT NULL DEFAULT 'INIT',
@@ -200,6 +201,7 @@ CREATE TABLE ambari.servicecomponentdesiredstate (
   component_name VARCHAR(255) NOT NULL,
   cluster_id BIGINT NOT NULL,
   desired_stack_id BIGINT NOT NULL,
+  desired_version VARCHAR(255) NOT NULL DEFAULT 'UNKNOWN',
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
   recovery_enabled SMALLINT NOT NULL DEFAULT 0,
@@ -756,6 +758,63 @@ CREATE TABLE ambari.setting (
 );
 GRANT ALL PRIVILEGES ON TABLE ambari.setting TO :username;
 
+-- upgrade tables
+CREATE TABLE ambari.upgrade (
+  upgrade_id BIGINT NOT NULL,
+  cluster_id BIGINT NOT NULL,
+  request_id BIGINT NOT NULL,
+  from_version VARCHAR(255) DEFAULT '' NOT NULL,
+  to_version VARCHAR(255) DEFAULT '' NOT NULL,
+  direction VARCHAR(255) DEFAULT 'UPGRADE' NOT NULL,
+  upgrade_package VARCHAR(255) NOT NULL,
+  upgrade_type VARCHAR(32) NOT NULL,
+  skip_failures SMALLINT DEFAULT 0 NOT NULL,
+  skip_sc_failures SMALLINT DEFAULT 0 NOT NULL,
+  downgrade_allowed SMALLINT DEFAULT 1 NOT NULL,
+  PRIMARY KEY (upgrade_id),
+  FOREIGN KEY (cluster_id) REFERENCES ambari.clusters(cluster_id),
+  FOREIGN KEY (request_id) REFERENCES ambari.request(request_id)
+);
+
+CREATE TABLE ambari.upgrade_group (
+  upgrade_group_id BIGINT NOT NULL,
+  upgrade_id BIGINT NOT NULL,
+  group_name VARCHAR(255) DEFAULT '' NOT NULL,
+  group_title VARCHAR(1024) DEFAULT '' NOT NULL,
+  PRIMARY KEY (upgrade_group_id),
+  FOREIGN KEY (upgrade_id) REFERENCES ambari.upgrade(upgrade_id)
+);
+
+CREATE TABLE ambari.upgrade_item (
+  upgrade_item_id BIGINT NOT NULL,
+  upgrade_group_id BIGINT NOT NULL,
+  stage_id BIGINT NOT NULL,
+  state VARCHAR(255) DEFAULT 'NONE' NOT NULL,
+  hosts TEXT,
+  tasks TEXT,
+  item_text VARCHAR(1024),
+  PRIMARY KEY (upgrade_item_id),
+  FOREIGN KEY (upgrade_group_id) REFERENCES ambari.upgrade_group(upgrade_group_id)
+);
+
+CREATE TABLE ambari.servicecomponent_history(
+  id BIGINT NOT NULL,
+  component_id BIGINT NOT NULL,
+  upgrade_id BIGINT NOT NULL,
+  from_stack_id BIGINT NOT NULL,
+  to_stack_id BIGINT NOT NULL,
+  CONSTRAINT PK_sc_history PRIMARY KEY (id),
+  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES ambari.servicecomponentdesiredstate (id),
+  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES ambari.upgrade (upgrade_id),
+  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES ambari.stack (stack_id),
+  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES ambari.stack (stack_id)
+);
+
+GRANT ALL PRIVILEGES ON TABLE ambari.upgrade TO :username;
+GRANT ALL PRIVILEGES ON TABLE ambari.upgrade_group TO :username;
+GRANT ALL PRIVILEGES ON TABLE ambari.upgrade_item TO :username;
+GRANT ALL PRIVILEGES ON TABLE ambari.servicecomponent_history TO :username;
+
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON ambari.stage (request_id);
 CREATE INDEX idx_hrc_request_id ON ambari.host_role_command (request_id);
@@ -779,6 +838,7 @@ ALTER TABLE ambari.stack ADD CONSTRAINT unq_stack UNIQUE (stack_name, stack_vers
 -- Note, Oracle has a limitation of 32 chars in the FK name, and we should use the same FK name in all DB types.
 ALTER TABLE ambari.members ADD CONSTRAINT FK_members_group_id FOREIGN KEY (group_id) REFERENCES ambari.groups (group_id);
 ALTER TABLE ambari.members ADD CONSTRAINT FK_members_user_id FOREIGN KEY (user_id) REFERENCES ambari.users (user_id);
+ALTER TABLE ambari.clusters ADD CONSTRAINT FK_clusters_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES ambari.upgrade (upgrade_id);
 ALTER TABLE ambari.clusterconfig ADD CONSTRAINT FK_clusterconfig_cluster_id FOREIGN KEY (cluster_id) REFERENCES ambari.clusters (cluster_id);
 ALTER TABLE ambari.clusterservices ADD CONSTRAINT FK_clusterservices_cluster_id FOREIGN KEY (cluster_id) REFERENCES ambari.clusters (cluster_id);
 ALTER TABLE ambari.clusterconfigmapping ADD CONSTRAINT clusterconfigmappingcluster_id FOREIGN KEY (cluster_id) REFERENCES ambari.clusters (cluster_id);
@@ -1011,63 +1071,6 @@ CREATE INDEX idx_alert_history_state on ambari.alert_history(alert_state);
 CREATE INDEX idx_alert_group_name on ambari.alert_group(group_name);
 CREATE INDEX idx_alert_notice_state on ambari.alert_notice(notify_state);
 
--- upgrade tables
-CREATE TABLE ambari.upgrade (
-  upgrade_id BIGINT NOT NULL,
-  cluster_id BIGINT NOT NULL,
-  request_id BIGINT NOT NULL,
-  from_version VARCHAR(255) DEFAULT '' NOT NULL,
-  to_version VARCHAR(255) DEFAULT '' NOT NULL,
-  direction VARCHAR(255) DEFAULT 'UPGRADE' NOT NULL,
-  upgrade_package VARCHAR(255) NOT NULL,
-  upgrade_type VARCHAR(32) NOT NULL,
-  skip_failures SMALLINT DEFAULT 0 NOT NULL,
-  skip_sc_failures SMALLINT DEFAULT 0 NOT NULL,
-  downgrade_allowed SMALLINT DEFAULT 1 NOT NULL,
-  PRIMARY KEY (upgrade_id),
-  FOREIGN KEY (cluster_id) REFERENCES ambari.clusters(cluster_id),
-  FOREIGN KEY (request_id) REFERENCES ambari.request(request_id)
-);
-
-CREATE TABLE ambari.upgrade_group (
-  upgrade_group_id BIGINT NOT NULL,
-  upgrade_id BIGINT NOT NULL,
-  group_name VARCHAR(255) DEFAULT '' NOT NULL,
-  group_title VARCHAR(1024) DEFAULT '' NOT NULL,
-  PRIMARY KEY (upgrade_group_id),
-  FOREIGN KEY (upgrade_id) REFERENCES ambari.upgrade(upgrade_id)
-);
-
-CREATE TABLE ambari.upgrade_item (
-  upgrade_item_id BIGINT NOT NULL,
-  upgrade_group_id BIGINT NOT NULL,
-  stage_id BIGINT NOT NULL,
-  state VARCHAR(255) DEFAULT 'NONE' NOT NULL,
-  hosts TEXT,
-  tasks TEXT,
-  item_text VARCHAR(1024),
-  PRIMARY KEY (upgrade_item_id),
-  FOREIGN KEY (upgrade_group_id) REFERENCES ambari.upgrade_group(upgrade_group_id)
-);
-
-GRANT ALL PRIVILEGES ON TABLE ambari.upgrade TO :username;
-GRANT ALL PRIVILEGES ON TABLE ambari.upgrade_group TO :username;
-GRANT ALL PRIVILEGES ON TABLE ambari.upgrade_item TO :username;
-
-CREATE TABLE ambari.servicecomponent_history(
-  id BIGINT NOT NULL,
-  component_id BIGINT NOT NULL,
-  upgrade_id BIGINT NOT NULL,
-  from_stack_id BIGINT NOT NULL,
-  to_stack_id BIGINT NOT NULL,
-  CONSTRAINT PK_sc_history PRIMARY KEY (id),
-  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES ambari.servicecomponentdesiredstate (id),
-  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES ambari.upgrade (upgrade_id),
-  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES ambari.stack (stack_id),
-  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES ambari.stack (stack_id)
-);
-GRANT ALL PRIVILEGES ON TABLE ambari.servicecomponent_history TO :username;
-
 ---------inserting some data-----------
 -- In order for the first ID to be 1, must initialize the ambari_sequences table with a sequence_value of 0.
 BEGIN;

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
index 6a6b77b..b7a764e 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
@@ -26,6 +26,7 @@ CREATE TABLE stack(
 CREATE TABLE clusters (
   cluster_id NUMERIC(19) NOT NULL,
   resource_id NUMERIC(19) NOT NULL,
+  upgrade_id NUMERIC(19),
   cluster_info VARCHAR(255) NOT NULL,
   cluster_name VARCHAR(100) NOT NULL UNIQUE,
   provisioning_state VARCHAR(255) NOT NULL DEFAULT 'INIT',
@@ -165,6 +166,7 @@ CREATE TABLE servicecomponentdesiredstate (
   component_name VARCHAR(255) NOT NULL,
   cluster_id NUMERIC(19) NOT NULL,
   desired_stack_id NUMERIC(19) NOT NULL,
+  desired_version VARCHAR(255) NOT NULL DEFAULT 'UNKNOWN',
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
   recovery_enabled SMALLINT NOT NULL DEFAULT 0,
@@ -671,6 +673,58 @@ CREATE TABLE setting (
   PRIMARY KEY (id)
 );
 
+
+-- upgrade tables
+CREATE TABLE upgrade (
+  upgrade_id NUMERIC(19) NOT NULL,
+  cluster_id NUMERIC(19) NOT NULL,
+  request_id NUMERIC(19) NOT NULL,
+  from_version VARCHAR(255) DEFAULT '' NOT NULL,
+  to_version VARCHAR(255) DEFAULT '' NOT NULL,
+  direction VARCHAR(255) DEFAULT 'UPGRADE' NOT NULL,
+  skip_failures BIT NOT NULL DEFAULT 0,
+  skip_sc_failures BIT NOT NULL DEFAULT 0,
+  downgrade_allowed BIT NOT NULL DEFAULT 1,
+  PRIMARY KEY (upgrade_id),
+  FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id),
+  FOREIGN KEY (request_id) REFERENCES request(request_id)
+);
+
+CREATE TABLE upgrade_group (
+  upgrade_group_id NUMERIC(19) NOT NULL,
+  upgrade_id NUMERIC(19) NOT NULL,
+  group_name VARCHAR(255) DEFAULT '' NOT NULL,
+  group_title VARCHAR(1024) DEFAULT '' NOT NULL,
+  PRIMARY KEY (upgrade_group_id),
+  FOREIGN KEY (upgrade_id) REFERENCES upgrade(upgrade_id)
+);
+
+CREATE TABLE upgrade_item (
+  upgrade_item_id NUMERIC(19) NOT NULL,
+  upgrade_group_id NUMERIC(19) NOT NULL,
+  stage_id NUMERIC(19) NOT NULL,
+  state VARCHAR(255) DEFAULT 'NONE' NOT NULL,
+  hosts TEXT,
+  tasks TEXT,
+  item_text VARCHAR(1024),
+  PRIMARY KEY (upgrade_item_id),
+  FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
+);
+
+CREATE TABLE servicecomponent_history(
+  id NUMERIC(19) NOT NULL,
+  component_id NUMERIC(19) NOT NULL,
+  upgrade_id NUMERIC(19) NOT NULL,
+  from_stack_id NUMERIC(19) NOT NULL,
+  to_stack_id NUMERIC(19) NOT NULL,
+  CONSTRAINT PK_sc_history PRIMARY KEY (id),
+  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id),
+  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id),
+  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id),
+  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)
+);
+
+
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
@@ -696,6 +750,7 @@ ALTER TABLE stack ADD CONSTRAINT unq_stack UNIQUE (stack_name, stack_version);
 -- Note, Oracle has a limitation of 32 chars in the FK name, and we should use the same FK name in all DB types.
 ALTER TABLE members ADD CONSTRAINT FK_members_group_id FOREIGN KEY (group_id) REFERENCES groups (group_id);
 ALTER TABLE members ADD CONSTRAINT FK_members_user_id FOREIGN KEY (user_id) REFERENCES users (user_id);
+ALTER TABLE clusters ADD CONSTRAINT FK_clusters_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id);
 ALTER TABLE clusterconfig ADD CONSTRAINT FK_clusterconfig_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE clusterservices ADD CONSTRAINT FK_clusterservices_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE clusterconfigmapping ADD CONSTRAINT clusterconfigmappingcluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
@@ -915,56 +970,6 @@ CREATE INDEX idx_alert_history_state on alert_history(alert_state);
 CREATE INDEX idx_alert_group_name on alert_group(group_name);
 CREATE INDEX idx_alert_notice_state on alert_notice(notify_state);
 
--- upgrade tables
-CREATE TABLE upgrade (
-  upgrade_id NUMERIC(19) NOT NULL,
-  cluster_id NUMERIC(19) NOT NULL,
-  request_id NUMERIC(19) NOT NULL,
-  from_version VARCHAR(255) DEFAULT '' NOT NULL,
-  to_version VARCHAR(255) DEFAULT '' NOT NULL,
-  direction VARCHAR(255) DEFAULT 'UPGRADE' NOT NULL,
-  skip_failures BIT NOT NULL DEFAULT 0,
-  skip_sc_failures BIT NOT NULL DEFAULT 0,
-  downgrade_allowed BIT NOT NULL DEFAULT 1,
-  PRIMARY KEY (upgrade_id),
-  FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id),
-  FOREIGN KEY (request_id) REFERENCES request(request_id)
-);
-
-CREATE TABLE upgrade_group (
-  upgrade_group_id NUMERIC(19) NOT NULL,
-  upgrade_id NUMERIC(19) NOT NULL,
-  group_name VARCHAR(255) DEFAULT '' NOT NULL,
-  group_title VARCHAR(1024) DEFAULT '' NOT NULL,
-  PRIMARY KEY (upgrade_group_id),
-  FOREIGN KEY (upgrade_id) REFERENCES upgrade(upgrade_id)
-);
-
-CREATE TABLE upgrade_item (
-  upgrade_item_id NUMERIC(19) NOT NULL,
-  upgrade_group_id NUMERIC(19) NOT NULL,
-  stage_id NUMERIC(19) NOT NULL,
-  state VARCHAR(255) DEFAULT 'NONE' NOT NULL,
-  hosts TEXT,
-  tasks TEXT,
-  item_text VARCHAR(1024),
-  PRIMARY KEY (upgrade_item_id),
-  FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
-);
-
-CREATE TABLE servicecomponent_history(
-  id NUMERIC(19) NOT NULL,
-  component_id NUMERIC(19) NOT NULL,
-  upgrade_id NUMERIC(19) NOT NULL,
-  from_stack_id NUMERIC(19) NOT NULL,
-  to_stack_id NUMERIC(19) NOT NULL,
-  CONSTRAINT PK_sc_history PRIMARY KEY (id),
-  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id),
-  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id),
-  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id),
-  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)
-);
-
 -- In order for the first ID to be 1, must initialize the ambari_sequences table with a sequence_value of 0.
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('cluster_id_seq', 1);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('host_id_seq', 0);

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
index 43419c1..f60f07a 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
@@ -39,6 +39,7 @@ CREATE TABLE stack(
 CREATE TABLE clusters (
   cluster_id BIGINT NOT NULL,
   resource_id BIGINT NOT NULL,
+  upgrade_id BIGINT,
   cluster_info VARCHAR(255) NOT NULL,
   cluster_name VARCHAR(100) NOT NULL UNIQUE,
   provisioning_state VARCHAR(255) NOT NULL DEFAULT 'INIT',
@@ -185,6 +186,7 @@ CREATE TABLE servicecomponentdesiredstate (
   component_name VARCHAR(255) NOT NULL,
   cluster_id BIGINT NOT NULL,
   desired_stack_id BIGINT NOT NULL,
+  desired_version VARCHAR(255) NOT NULL DEFAULT 'UNKNOWN',
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
@@ -778,6 +780,60 @@ CREATE TABLE setting (
   PRIMARY KEY (id)
 );
 
+
+-- upgrade tables
+CREATE TABLE upgrade (
+  upgrade_id BIGINT NOT NULL,
+  cluster_id BIGINT NOT NULL,
+  request_id BIGINT NOT NULL,
+  from_version VARCHAR(255) DEFAULT '' NOT NULL,
+  to_version VARCHAR(255) DEFAULT '' NOT NULL,
+  direction VARCHAR(255) DEFAULT 'UPGRADE' NOT NULL,
+  upgrade_package VARCHAR(255) NOT NULL,
+  upgrade_type VARCHAR(32) NOT NULL,
+  skip_failures BIT NOT NULL DEFAULT 0,
+  skip_sc_failures BIT NOT NULL DEFAULT 0,
+  downgrade_allowed BIT NOT NULL DEFAULT 1,
+  PRIMARY KEY CLUSTERED (upgrade_id),
+  FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id),
+  FOREIGN KEY (request_id) REFERENCES request(request_id)
+);
+
+CREATE TABLE upgrade_group (
+  upgrade_group_id BIGINT NOT NULL,
+  upgrade_id BIGINT NOT NULL,
+  group_name VARCHAR(255) DEFAULT '' NOT NULL,
+  group_title VARCHAR(1024) DEFAULT '' NOT NULL,
+  PRIMARY KEY CLUSTERED (upgrade_group_id),
+  FOREIGN KEY (upgrade_id) REFERENCES upgrade(upgrade_id)
+);
+
+CREATE TABLE upgrade_item (
+  upgrade_item_id BIGINT NOT NULL,
+  upgrade_group_id BIGINT NOT NULL,
+  stage_id BIGINT NOT NULL,
+  state VARCHAR(255) DEFAULT 'NONE' NOT NULL,
+  hosts TEXT,
+  tasks TEXT,
+  item_text VARCHAR(1024),
+  PRIMARY KEY CLUSTERED (upgrade_item_id),
+  FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
+);
+
+CREATE TABLE servicecomponent_history(
+  id BIGINT NOT NULL,
+  component_id BIGINT NOT NULL,
+  upgrade_id BIGINT NOT NULL,
+  from_stack_id BIGINT NOT NULL,
+  to_stack_id BIGINT NOT NULL,
+  CONSTRAINT PK_sc_history PRIMARY KEY (id),
+  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id),
+  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id),
+  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id),
+  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)
+);
+
+
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
@@ -805,6 +861,7 @@ ALTER TABLE stack ADD CONSTRAINT unq_stack UNIQUE (stack_name, stack_version);
 -- Note, Oracle has a limitation of 32 chars in the FK name, and we should use the same FK name in all DB types.
 ALTER TABLE members ADD CONSTRAINT FK_members_group_id FOREIGN KEY (group_id) REFERENCES groups (group_id);
 ALTER TABLE members ADD CONSTRAINT FK_members_user_id FOREIGN KEY (user_id) REFERENCES users (user_id);
+ALTER TABLE clusters ADD CONSTRAINT FK_clusters_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id);
 ALTER TABLE clusterconfig ADD CONSTRAINT FK_clusterconfig_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE clusterservices ADD CONSTRAINT FK_clusterservices_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE clusterconfigmapping ADD CONSTRAINT clusterconfigmappingcluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
@@ -1024,58 +1081,6 @@ CREATE INDEX idx_alert_history_state on alert_history(alert_state);
 CREATE INDEX idx_alert_group_name on alert_group(group_name);
 CREATE INDEX idx_alert_notice_state on alert_notice(notify_state);
 
--- upgrade tables
-CREATE TABLE upgrade (
-  upgrade_id BIGINT NOT NULL,
-  cluster_id BIGINT NOT NULL,
-  request_id BIGINT NOT NULL,
-  from_version VARCHAR(255) DEFAULT '' NOT NULL,
-  to_version VARCHAR(255) DEFAULT '' NOT NULL,
-  direction VARCHAR(255) DEFAULT 'UPGRADE' NOT NULL,
-  upgrade_package VARCHAR(255) NOT NULL,
-  upgrade_type VARCHAR(32) NOT NULL,
-  skip_failures BIT NOT NULL DEFAULT 0,
-  skip_sc_failures BIT NOT NULL DEFAULT 0,
-  downgrade_allowed BIT NOT NULL DEFAULT 1,
-  PRIMARY KEY CLUSTERED (upgrade_id),
-  FOREIGN KEY (cluster_id) REFERENCES clusters(cluster_id),
-  FOREIGN KEY (request_id) REFERENCES request(request_id)
-);
-
-CREATE TABLE upgrade_group (
-  upgrade_group_id BIGINT NOT NULL,
-  upgrade_id BIGINT NOT NULL,
-  group_name VARCHAR(255) DEFAULT '' NOT NULL,
-  group_title VARCHAR(1024) DEFAULT '' NOT NULL,
-  PRIMARY KEY CLUSTERED (upgrade_group_id),
-  FOREIGN KEY (upgrade_id) REFERENCES upgrade(upgrade_id)
-);
-
-CREATE TABLE upgrade_item (
-  upgrade_item_id BIGINT NOT NULL,
-  upgrade_group_id BIGINT NOT NULL,
-  stage_id BIGINT NOT NULL,
-  state VARCHAR(255) DEFAULT 'NONE' NOT NULL,
-  hosts TEXT,
-  tasks TEXT,
-  item_text VARCHAR(1024),
-  PRIMARY KEY CLUSTERED (upgrade_item_id),
-  FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
-);
-
-CREATE TABLE servicecomponent_history(
-  id BIGINT NOT NULL,
-  component_id BIGINT NOT NULL,
-  upgrade_id BIGINT NOT NULL,
-  from_stack_id BIGINT NOT NULL,
-  to_stack_id BIGINT NOT NULL,
-  CONSTRAINT PK_sc_history PRIMARY KEY (id),
-  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id),
-  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id),
-  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id),
-  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)
-);
-
 ---------inserting some data-----------
 BEGIN TRANSACTION
   INSERT INTO ambari_sequences (sequence_name, [sequence_value])
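
Because the upgrade tables are re-created in each dialect's script, the boolean-style flags make the per-database type choices easy to compare: SMALLINT on Derby and PostgreSQL, TINYINT(1) on MySQL, NUMBER(1) on Oracle, and BIT on SQL Anywhere and SQL Server. The values in the tiny harness below are copied from the DDL in this digest; the harness itself is only a side-by-side printout.

import java.util.LinkedHashMap;
import java.util.Map;

public class BooleanColumnDialects {

  public static void main(String[] args) {
    // The same logical flag (upgrade.skip_failures) as each CREATE script declares it.
    Map<String, String> skipFailures = new LinkedHashMap<String, String>();
    skipFailures.put("Derby",        "skip_failures SMALLINT DEFAULT 0 NOT NULL");
    skipFailures.put("MySQL",        "skip_failures TINYINT(1) NOT NULL DEFAULT 0");
    skipFailures.put("Oracle",       "skip_failures NUMBER(1) DEFAULT 0 NOT NULL");
    skipFailures.put("PostgreSQL",   "skip_failures SMALLINT DEFAULT 0 NOT NULL");
    skipFailures.put("SQL Anywhere", "skip_failures BIT NOT NULL DEFAULT 0");
    skipFailures.put("SQL Server",   "skip_failures BIT NOT NULL DEFAULT 0");

    for (Map.Entry<String, String> e : skipFailures.entrySet()) {
      System.out.println(e.getKey() + ": " + e.getValue());
    }
  }
}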

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/StateRecoveryManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/StateRecoveryManagerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/StateRecoveryManagerTest.java
index 0e9b18d..44008de 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/StateRecoveryManagerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/StateRecoveryManagerTest.java
@@ -89,9 +89,6 @@ public class StateRecoveryManagerTest {
       add(getHostVersionMock("installing_version", RepositoryVersionState.INSTALLING, installingHostVersionCapture));
       add(getHostVersionMock("installed_version", RepositoryVersionState.INSTALLED, installedHostVersionCapture));
       add(getHostVersionMock("out_of_sync_version", RepositoryVersionState.OUT_OF_SYNC, outOfSyncHostVersionCapture));
-      add(getHostVersionMock("upgrade_failed_version", RepositoryVersionState.UPGRADE_FAILED, upgradeFailedHostVersionCapture));
-      add(getHostVersionMock("upgrading_version", RepositoryVersionState.UPGRADING, upgradingHostVersionCapture));
-      add(getHostVersionMock("upgraded_version", RepositoryVersionState.UPGRADED, upgradedHostVersionCapture));
       add(getHostVersionMock("current_version", RepositoryVersionState.CURRENT, currentHostVersionCapture));
     }});
 
@@ -111,9 +108,6 @@ public class StateRecoveryManagerTest {
       add(getClusterVersionMock("installing_version", RepositoryVersionState.INSTALLING, installingClusterVersionCapture));
       add(getClusterVersionMock("installed_version", RepositoryVersionState.INSTALLED, installedClusterVersionCapture));
       add(getClusterVersionMock("out_of_sync_version", RepositoryVersionState.OUT_OF_SYNC, outOfSyncClusterVersionCapture));
-      add(getClusterVersionMock("upgrade_failed_version", RepositoryVersionState.UPGRADE_FAILED, upgradeFailedClusterVersionCapture));
-      add(getClusterVersionMock("upgrading_version", RepositoryVersionState.UPGRADING, upgradingClusterVersionCapture));
-      add(getClusterVersionMock("upgraded_version", RepositoryVersionState.UPGRADED, upgradedClusterVersionCapture));
       add(getClusterVersionMock("current_version", RepositoryVersionState.CURRENT, currentClusterVersionCapture));
     }});
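
Aside on the state model exercised above: with the upgrade-specific mocks removed, the only repository version states this test still touches are the install-oriented ones. Below is a minimal, illustrative Java sketch of that reduced set; it is not the actual RepositoryVersionState enum from ambari-server (which may define additional values and transition rules), and the comments only give the rough meaning of each state as it is used in these tests.

    /** Illustrative subset of repository version states referenced by the test above. */
    public enum RepositoryVersionState {
      INSTALLING,     // repository bits are being installed on hosts
      INSTALLED,      // bits are installed, but this version is not yet the active one
      INSTALL_FAILED, // installation failed on at least one host
      OUT_OF_SYNC,    // a host is missing packages for this version
      CURRENT         // the version the cluster is currently running
    }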
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
index 2a4cec8..b6d51af 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
@@ -2412,7 +2412,7 @@ public class TestHeartbeatHandler {
     cluster.setCurrentStackVersion(stackId);
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     Set<String> hostNames = new HashSet<String>(){{
       add(DummyHostname1);

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
index 9c59cde..a5396d8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
@@ -118,7 +118,7 @@ public class TestHeartbeatMonitor {
     Cluster cluster = clusters.getCluster(clusterName);
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
     Set<String> hostNames = new HashSet<String>(){{
       add(hostname1);
       add(hostname2);
@@ -205,7 +205,7 @@ public class TestHeartbeatMonitor {
 
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
     Set<String> hostNames = new HashSet<String>() {{
       add(hostname1);
       add(hostname2);
@@ -320,7 +320,7 @@ public class TestHeartbeatMonitor {
 
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     Set<String> hostNames = new HashSet<String>(){{
       add(hostname1);
@@ -438,7 +438,7 @@ public class TestHeartbeatMonitor {
 
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     Set<String> hostNames = new HashSet<String>(){{
       add(hostname1);
@@ -559,7 +559,7 @@ public class TestHeartbeatMonitor {
     cluster.setDesiredStackVersion(stackId);
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     Set<String> hostNames = new HashSet<String>(){{
       add(hostname1);

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 992150c..498fddf 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -959,7 +959,7 @@ public class AmbariManagementControllerTest {
     c1.setDesiredStackVersion(stackId);
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     c1.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
     Service s1 = serviceFactory.createNew(c1, "HDFS");
     Service s2 = serviceFactory.createNew(c1, "MAPREDUCE");
     c1.addService(s1);
@@ -1256,7 +1256,7 @@ public class AmbariManagementControllerTest {
 
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     c1.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     Service s1 = serviceFactory.createNew(c1, "HDFS");
     Service s2 = serviceFactory.createNew(c1, "MAPREDUCE");
@@ -1528,21 +1528,21 @@ public class AmbariManagementControllerTest {
     foo.setDesiredStackVersion(stackId);
     foo.setCurrentStackVersion(stackId);
     foo.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
     foo.transitionClusterVersion(stackId, stackId.getStackVersion(), RepositoryVersionState.CURRENT);
 
     stackId = new StackId("HDP-0.2");
     c1.setDesiredStackVersion(stackId);
     c1.setCurrentStackVersion(stackId);
     c1.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
     c1.transitionClusterVersion(stackId, stackId.getStackVersion(), RepositoryVersionState.CURRENT);
 
     stackId = new StackId("HDP-0.2");
     c2.setDesiredStackVersion(stackId);
     c2.setCurrentStackVersion(stackId);
     c2.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
     c2.transitionClusterVersion(stackId, stackId.getStackVersion(), RepositoryVersionState.CURRENT);
 
     try {
@@ -1753,7 +1753,7 @@ public class AmbariManagementControllerTest {
     c.setCurrentStackVersion(stackId);
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     c.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     HostResourceProviderTest.createHosts(controller, requests);
 
@@ -1777,7 +1777,7 @@ public class AmbariManagementControllerTest {
     c.setCurrentStackVersion(stackID);
     helper.getOrCreateRepositoryVersion(stackID, stackID.getStackVersion());
     c.createClusterVersion(stackID, stackID.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     setOsFamily(clusters.getHost("h1"), "redhat", "5.9");
     setOsFamily(clusters.getHost("h2"), "redhat", "5.9");
@@ -2125,7 +2125,7 @@ public class AmbariManagementControllerTest {
     c1.setDesiredStackVersion(stackId);
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     c1.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     ClusterRequest r = new ClusterRequest(null, null, null, null);
     Set<ClusterResponse> resp = controller.getClusters(Collections.singleton(r));
@@ -7988,7 +7988,7 @@ public class AmbariManagementControllerTest {
 
     helper.getOrCreateRepositoryVersion(stackID, stackID.getStackVersion());
     c.createClusterVersion(stackID, stackID.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
     clusters.addHost(hostName1);
     setOsFamily(clusters.getHost("h1"), "redhat", "5.9");
     clusters.getHost(hostName1).persist();
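
Every hunk in this file makes the same substitution in the test bootstrap: register a repository version, then create the cluster version in INSTALLING rather than UPGRADING. If that pattern keeps spreading, a small helper could centralize it. The sketch below is hypothetical: the class name, method name, and placement are assumptions and are not part of the Ambari test sources, but the calls it wraps appear verbatim in the hunks above, and the import paths follow the packages shown in the surrounding diffs.

    import org.apache.ambari.server.orm.OrmTestHelper;
    import org.apache.ambari.server.state.Cluster;
    import org.apache.ambari.server.state.RepositoryVersionState;
    import org.apache.ambari.server.state.StackId;

    /** Hypothetical test-support utility; not present in the Ambari code base. */
    final class ClusterVersionTestSupport {
      private ClusterVersionTestSupport() {
      }

      /** Registers the repo version and creates the cluster version in INSTALLING. */
      static void bootstrap(OrmTestHelper helper, Cluster cluster, StackId stackId)
          throws Exception {
        helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
        cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
            RepositoryVersionState.INSTALLING);
      }
    }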

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackDefinedPropertyProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackDefinedPropertyProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackDefinedPropertyProviderTest.java
index 9b27447..6f5fdff 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackDefinedPropertyProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackDefinedPropertyProviderTest.java
@@ -132,7 +132,7 @@ public class StackDefinedPropertyProviderTest {
     cluster.setDesiredStackVersion(stackId);
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     clusters.addHost("h1");
     Host host = clusters.getHost("h1");

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java
index b0e3467..654fbd9 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java
@@ -210,7 +210,7 @@ public class UpgradeResourceProviderHDP22Test {
     Cluster cluster = clusters.getCluster("c1");
 
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
-    cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin", RepositoryVersionState.UPGRADING);
+    cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin", RepositoryVersionState.INSTALLING);
     cluster.transitionClusterVersion(stackId, stackId.getStackVersion(), RepositoryVersionState.CURRENT);
 
     clusters.addHost("h1");

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
index 035c61a..5866453 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
@@ -199,7 +199,7 @@ public class UpgradeResourceProviderTest {
     helper.getOrCreateRepositoryVersion(stack211, stack211.getStackVersion());
     helper.getOrCreateRepositoryVersion(stack220, stack220.getStackVersion());
 
-    cluster.createClusterVersion(stack211, stack211.getStackVersion(), "admin", RepositoryVersionState.UPGRADING);
+    cluster.createClusterVersion(stack211, stack211.getStackVersion(), "admin", RepositoryVersionState.INSTALLING);
     cluster.transitionClusterVersion(stack211, stack211.getStackVersion(), RepositoryVersionState.CURRENT);
 
     clusters.addHost("h1");

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/events/EventsTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/events/EventsTest.java b/ambari-server/src/test/java/org/apache/ambari/server/events/EventsTest.java
index ad53f70..785f0fb 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/events/EventsTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/events/EventsTest.java
@@ -114,7 +114,7 @@ public class EventsTest {
     m_cluster.setDesiredStackVersion(stackId);
     m_helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     m_cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     m_clusters.mapHostToCluster(HOSTNAME, m_clusterName);
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/HostVersionOutOfSyncListenerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/HostVersionOutOfSyncListenerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/HostVersionOutOfSyncListenerTest.java
index 772d68a..1bf9d83 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/HostVersionOutOfSyncListenerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/HostVersionOutOfSyncListenerTest.java
@@ -102,7 +102,7 @@ public class HostVersionOutOfSyncListenerTest {
     addHost("h1");
 
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
-    c1.createClusterVersion(stackId, stackId.getStackVersion(), "admin", RepositoryVersionState.UPGRADING);
+    c1.createClusterVersion(stackId, stackId.getStackVersion(), "admin", RepositoryVersionState.INSTALLING);
     c1.transitionClusterVersion(stackId, stackId.getStackVersion(), RepositoryVersionState.CURRENT);
     clusters.mapHostToCluster("h1", "c1");
   }
@@ -415,15 +415,15 @@ public class HostVersionOutOfSyncListenerTest {
     assertRepoVersionState(stackId.getStackId(), "2.2.9-9999", RepositoryVersionState.INSTALLED);
 
     // make it seems like we upgraded, but 1 host still hasn't finished
-    hv1.setState(RepositoryVersionState.UPGRADED);
-    hv2.setState(RepositoryVersionState.UPGRADING);
+    hv1.setState(RepositoryVersionState.INSTALLED);
+    hv2.setState(RepositoryVersionState.INSTALLING);
     hostVersionDAO.merge(hv1);
     hostVersionDAO.merge(hv2);
 
     // recalculate and ensure that the cluster is UPGRADING
     c1.recalculateAllClusterVersionStates();
     assertRepoVersionState(stackId.getStackId(), "2.2.0", RepositoryVersionState.CURRENT);
-    assertRepoVersionState(stackId.getStackId(), "2.2.9-9999", RepositoryVersionState.UPGRADING);
+    assertRepoVersionState(stackId.getStackId(), "2.2.9-9999", RepositoryVersionState.INSTALLING);
 
     // delete the host that was UPGRADING, and DON'T call recalculate; let the
     // event handle it
@@ -431,7 +431,7 @@ public class HostVersionOutOfSyncListenerTest {
     clusters.deleteHost("h2");
     injector.getInstance(UnitOfWork.class).end();
     assertRepoVersionState(stackId.getStackId(), "2.2.0", RepositoryVersionState.CURRENT);
-    assertRepoVersionState(stackId.getStackId(), "2.2.9-9999", RepositoryVersionState.UPGRADED);
+    assertRepoVersionState(stackId.getStackId(), "2.2.9-9999", RepositoryVersionState.INSTALLED);
   }
 
   private void addHost(String hostname) throws AmbariException {

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java
index ae05a6b..aa405ae 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListenerTest.java
@@ -17,17 +17,23 @@
  */
 package org.apache.ambari.server.events.listeners.upgrade;
 
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.anyString;
 import static org.easymock.EasyMock.createNiceMock;
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.verify;
 
-import org.apache.ambari.server.events.HostComponentVersionEvent;
+import org.apache.ambari.server.events.HostComponentVersionAdvertisedEvent;
 import org.apache.ambari.server.events.publishers.VersionEventPublisher;
 import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
 import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.ambari.server.state.ServiceImpl;
 import org.easymock.EasyMock;
+import org.junit.Ignore;
 import org.junit.Test;
 
 /**
@@ -35,23 +41,36 @@ import org.junit.Test;
  */
 public class StackVersionListenerTest {
 
+  private static final String DESIRED_VERSION = "1.2.3.4-5678";
+  private static final String SERVICE_COMPONENT_NAME = "Some component name";
+  private static final String SERVICE_NAME = "Service name";
+
   @Test
   public void testOnAmbariEvent() throws Exception {
     VersionEventPublisher publisher = createNiceMock(VersionEventPublisher.class);
     Cluster cluster = createNiceMock(Cluster.class);
     ServiceComponentHost sch = createNiceMock(ServiceComponentHost.class);
     RepositoryVersionEntity repositoryVersionEntity = createNiceMock(RepositoryVersionEntity.class);
+    Service service = createNiceMock(Service.class);
+    ServiceComponent serviceComponent = createNiceMock(ServiceComponent.class);
+
+    expect(serviceComponent.getDesiredVersion()).andReturn(DESIRED_VERSION);
+    expect(service.getServiceComponent(SERVICE_COMPONENT_NAME)).andReturn(serviceComponent);
 
     expect(cluster.getClusterId()).andReturn(99L);
+    expect(cluster.getService(anyString())).andReturn(service);
+
+    expect(sch.getServiceName()).andReturn(SERVICE_NAME);
+    expect(sch.getServiceComponentName()).andReturn(SERVICE_COMPONENT_NAME);
     expect(sch.recalculateHostVersionState()).andReturn(repositoryVersionEntity).atLeastOnce();
 
     cluster.recalculateClusterVersionState(repositoryVersionEntity);
     EasyMock.expectLastCall().atLeastOnce();
 
     // Replay and assert expectations
-    replay(cluster, sch);
+    replay(cluster, sch, serviceComponent, service);
 
-    HostComponentVersionEvent event = new HostComponentVersionEvent(cluster, sch);
+    HostComponentVersionAdvertisedEvent event = new HostComponentVersionAdvertisedEvent(cluster, sch, DESIRED_VERSION);
     StackVersionListener listener = new StackVersionListener(publisher);
 
     listener.onAmbariEvent(event);
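
The rename above also changes what the event carries: HostComponentVersionAdvertisedEvent is constructed with the advertised version string in addition to the cluster and host component. A minimal sketch of the shape these two tests rely on follows; the real class lives in org.apache.ambari.server.events (per the import in this diff) and may expose more than shown here, and the accessor names getCluster, getServiceComponentHost, and getVersion are illustrative assumptions.

    import org.apache.ambari.server.state.Cluster;
    import org.apache.ambari.server.state.ServiceComponentHost;

    /** Illustrative sketch of the event shape used by the tests; not the real class. */
    public class HostComponentVersionAdvertisedEvent {
      private final Cluster cluster;
      private final ServiceComponentHost sch;
      private final String version;

      public HostComponentVersionAdvertisedEvent(Cluster cluster, ServiceComponentHost sch,
          String version) {
        this.cluster = cluster;
        this.sch = sch;
        this.version = version;
      }

      public Cluster getCluster() {
        return cluster;
      }

      public ServiceComponentHost getServiceComponentHost() {
        return sch;
      }

      public String getVersion() {
        return version;
      }
    }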

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/events/publishers/VersionEventPublisherTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/events/publishers/VersionEventPublisherTest.java b/ambari-server/src/test/java/org/apache/ambari/server/events/publishers/VersionEventPublisherTest.java
index 071c6f0..5597f25 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/events/publishers/VersionEventPublisherTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/events/publishers/VersionEventPublisherTest.java
@@ -22,7 +22,7 @@ import com.google.common.eventbus.Subscribe;
 import com.google.inject.Guice;
 import com.google.inject.Inject;
 import com.google.inject.Injector;
-import org.apache.ambari.server.events.HostComponentVersionEvent;
+import org.apache.ambari.server.events.HostComponentVersionAdvertisedEvent;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.ServiceComponentHost;
 import org.junit.Before;
@@ -60,7 +60,7 @@ public class VersionEventPublisherTest {
 
     Listener listener = injector.getInstance(Listener.class);
 
-    HostComponentVersionEvent event = new HostComponentVersionEvent(cluster, sch);
+    HostComponentVersionAdvertisedEvent event = new HostComponentVersionAdvertisedEvent(cluster, sch, "1.2.3.4-5678");
 
     publisher.publish(event);
 
@@ -71,7 +71,7 @@ public class VersionEventPublisherTest {
 
   private static class Listener {
 
-    private HostComponentVersionEvent lastEvent = null;
+    private HostComponentVersionAdvertisedEvent lastEvent = null;
 
     @Inject
     private Listener(VersionEventPublisher eventPublisher) {
@@ -79,11 +79,11 @@ public class VersionEventPublisherTest {
     }
 
     @Subscribe
-    public void onEvent(HostComponentVersionEvent event) {
+    public void onEvent(HostComponentVersionAdvertisedEvent event) {
       lastEvent = event;
     }
 
-    public HostComponentVersionEvent getLastEvent() {
+    public HostComponentVersionAdvertisedEvent getLastEvent() {
       return lastEvent;
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/orm/OrmTestHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/OrmTestHelper.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/OrmTestHelper.java
index 71b89c3..19146fd 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/OrmTestHelper.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/OrmTestHelper.java
@@ -376,7 +376,7 @@ public class OrmTestHelper {
     cluster.setDesiredStackVersion(stackId);
     getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster.createClusterVersion(stackId,
-        stackId.getStackVersion(), "admin", RepositoryVersionState.UPGRADING);
+        stackId.getStackVersion(), "admin", RepositoryVersionState.INSTALLING);
     return cluster;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/ClusterVersionDAOTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/ClusterVersionDAOTest.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/ClusterVersionDAOTest.java
index b57db5b..4edfdcb 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/ClusterVersionDAOTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/ClusterVersionDAOTest.java
@@ -109,7 +109,7 @@ public class ClusterVersionDAOTest {
     // Start upgrading C
     if (currStep >= 4) {
       if (lastStep <= 3) {
-        cvC = new ClusterVersionEntity(cluster, helper.getOrCreateRepositoryVersion(HDP_22_STACK, "2.2.0.0-100"), RepositoryVersionState.UPGRADING, System.currentTimeMillis(), "admin");
+        cvC = new ClusterVersionEntity(cluster, helper.getOrCreateRepositoryVersion(HDP_22_STACK, "2.2.0.0-100"), RepositoryVersionState.INSTALLING, System.currentTimeMillis(), "admin");
         clusterVersionDAO.create(cvC);
         cvCId = cvC.getId();
       } else {
@@ -119,14 +119,14 @@ public class ClusterVersionDAOTest {
 
     // Fail upgrade for C
     if (currStep >= 5 && lastStep <= 4) {
-        cvC.setState(RepositoryVersionState.UPGRADE_FAILED);
+        cvC.setState(RepositoryVersionState.INSTALL_FAILED);
         cvC.setEndTime(System.currentTimeMillis());
         clusterVersionDAO.merge(cvC);
     }
 
     // Retry upgrade on C
     if (currStep >= 6 && lastStep <= 5) {
-        cvC.setState(RepositoryVersionState.UPGRADING);
+        cvC.setState(RepositoryVersionState.INSTALLING);
         cvC.setEndTime(0L);
         clusterVersionDAO.merge(cvC);
     }
@@ -212,44 +212,44 @@ public class ClusterVersionDAOTest {
     createRecordsUntilStep(1);
     Assert.assertEquals(1, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.CURRENT).size());
     Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALLED).size());
-    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.UPGRADING).size());
-    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.UPGRADE_FAILED).size());
+    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALLING).size());
+    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALL_FAILED).size());
 
     createRecordsUntilStep(2);
     Assert.assertEquals(1, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.CURRENT).size());
     Assert.assertEquals(1, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALLED).size());
-    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.UPGRADING).size());
-    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.UPGRADE_FAILED).size());
+    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALLING).size());
+    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALL_FAILED).size());
 
     createRecordsUntilStep(3);
     Assert.assertEquals(1, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.CURRENT).size());
     Assert.assertEquals(1, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALLED).size());
-    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.UPGRADING).size());
-    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.UPGRADE_FAILED).size());
+    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALLING).size());
+    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALL_FAILED).size());
 
     createRecordsUntilStep(4);
     Assert.assertEquals(1, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.CURRENT).size());
     Assert.assertEquals(1, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALLED).size());
-    Assert.assertEquals(1, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.UPGRADING).size());
-    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.UPGRADE_FAILED).size());
+    Assert.assertEquals(1, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALLING).size());
+    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALL_FAILED).size());
 
     createRecordsUntilStep(5);
     Assert.assertEquals(1, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.CURRENT).size());
     Assert.assertEquals(1, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALLED).size());
-    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.UPGRADING).size());
-    Assert.assertEquals(1, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.UPGRADE_FAILED).size());
+    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALLING).size());
+    Assert.assertEquals(1, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALL_FAILED).size());
 
     createRecordsUntilStep(6);
     Assert.assertEquals(1, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.CURRENT).size());
     Assert.assertEquals(1, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALLED).size());
-    Assert.assertEquals(1, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.UPGRADING).size());
-    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.UPGRADE_FAILED).size());
+    Assert.assertEquals(1, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALLING).size());
+    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALL_FAILED).size());
 
     createRecordsUntilStep(7);
     Assert.assertEquals(1, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.CURRENT).size());
     Assert.assertEquals(2, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALLED).size());
-    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.UPGRADING).size());
-    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.UPGRADE_FAILED).size());
+    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALLING).size());
+    Assert.assertEquals(0, clusterVersionDAO.findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALL_FAILED).size());
   }
 
   @After

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/HostVersionDAOTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/HostVersionDAOTest.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/HostVersionDAOTest.java
index e8e5d20..d8e1576 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/HostVersionDAOTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/HostVersionDAOTest.java
@@ -195,10 +195,10 @@ public class HostVersionDAOTest {
         desiredState = RepositoryVersionState.INSTALLED;
       }
       if (i % 3 == 1) {
-        desiredState = RepositoryVersionState.UPGRADING;
+        desiredState = RepositoryVersionState.INSTALLING;
       }
       if (i % 3 == 2) {
-        desiredState = RepositoryVersionState.UPGRADE_FAILED;
+        desiredState = RepositoryVersionState.INSTALL_FAILED;
       }
 
 
@@ -269,8 +269,8 @@ public class HostVersionDAOTest {
   public void testFindByClusterHostAndState() {
     Assert.assertEquals(1, hostVersionDAO.findByClusterHostAndState("test_cluster1", "test_host1", RepositoryVersionState.CURRENT).size());
     Assert.assertEquals(0, hostVersionDAO.findByClusterHostAndState("test_cluster1", "test_host1", RepositoryVersionState.INSTALLED).size());
-    Assert.assertEquals(0, hostVersionDAO.findByClusterHostAndState("test_cluster1", "test_host2", RepositoryVersionState.UPGRADING).size());
-    Assert.assertEquals(0, hostVersionDAO.findByClusterHostAndState("test_cluster1", "test_host3", RepositoryVersionState.UPGRADE_FAILED).size());
+    Assert.assertEquals(0, hostVersionDAO.findByClusterHostAndState("test_cluster1", "test_host2", RepositoryVersionState.INSTALLING).size());
+    Assert.assertEquals(0, hostVersionDAO.findByClusterHostAndState("test_cluster1", "test_host3", RepositoryVersionState.INSTALL_FAILED).size());
 
     addMoreVersions();
 
@@ -279,8 +279,8 @@ public class HostVersionDAOTest {
     Assert.assertEquals(2, hostVersionDAO.findByClusterHostAndState("test_cluster1", "test_host3", RepositoryVersionState.INSTALLED).size());
 
     Assert.assertEquals(1, hostVersionDAO.findByClusterHostAndState("test_cluster1", "test_host1", RepositoryVersionState.CURRENT).size());
-    Assert.assertEquals(1, hostVersionDAO.findByClusterHostAndState("test_cluster1", "test_host2", RepositoryVersionState.UPGRADING).size());
-    Assert.assertEquals(1, hostVersionDAO.findByClusterHostAndState("test_cluster1", "test_host3", RepositoryVersionState.UPGRADE_FAILED).size());
+    Assert.assertEquals(1, hostVersionDAO.findByClusterHostAndState("test_cluster1", "test_host2", RepositoryVersionState.INSTALLING).size());
+    Assert.assertEquals(1, hostVersionDAO.findByClusterHostAndState("test_cluster1", "test_host3", RepositoryVersionState.INSTALL_FAILED).size());
   }
 
   /**
@@ -318,9 +318,9 @@ public class HostVersionDAOTest {
     HostVersionEntity hostVersionEntity1LastExpected = new HostVersionEntity(host1,
         helper.getOrCreateRepositoryVersion(HDP_22_STACK, repoVersion_2202), RepositoryVersionState.INSTALLED);
     HostVersionEntity hostVersionEntity2LastExpected = new HostVersionEntity(host2,
-        helper.getOrCreateRepositoryVersion(HDP_22_STACK, repoVersion_2202), RepositoryVersionState.UPGRADING);
+        helper.getOrCreateRepositoryVersion(HDP_22_STACK, repoVersion_2202), RepositoryVersionState.INSTALLING);
     HostVersionEntity hostVersionEntity3LastExpected = new HostVersionEntity(host3,
-        helper.getOrCreateRepositoryVersion(HDP_22_STACK, repoVersion_2202), RepositoryVersionState.UPGRADE_FAILED);
+        helper.getOrCreateRepositoryVersion(HDP_22_STACK, repoVersion_2202), RepositoryVersionState.INSTALL_FAILED);
 
     // Actual
     HostVersionEntity hostVersionEntity1LastActual = hostVersionDAO.findByClusterStackVersionAndHost("test_cluster1", HDP_22_STACK, repoVersion_2202, "test_host1");

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
index 3ad2240..cf79b6f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
@@ -92,7 +92,7 @@ public class UpgradeDAOTest {
     items.add(item);
 
     item = new UpgradeItemEntity();
-    item.setState(UpgradeState.PENDING);
+    item.setState(UpgradeState.COMPLETE);  // TODO: is it a correct value for test context?
     item.setStageId(Long.valueOf(1L));
     items.add(item);
 


[08/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e7bdb7b0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e7bdb7b0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e7bdb7b0

Branch: refs/heads/trunk
Commit: e7bdb7b05478d39970680cc92c95177595ff3277
Parents: 986a518 f4edad8
Author: Nate Cole <nc...@hortonworks.com>
Authored: Wed Jan 27 10:33:04 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Wed Jan 27 10:33:04 2016 -0500

----------------------------------------------------------------------
 .../app/scripts/directives/editableList.js      |   9 ++
 .../src/main/python/core/host_info.py           |   8 +-
 ambari-server/pom.xml                           |  11 +-
 .../server/configuration/Configuration.java     |  13 ++
 .../ambari/server/controller/AmbariServer.java  |   2 +
 .../server/controller/ControllerModule.java     |   2 +
 .../controller/utilities/KerberosChecker.java   | 127 +++++++++++++++++++
 .../server/upgrade/SchemaUpgradeHelper.java     |   1 +
 .../server/upgrade/UpgradeCatalog222.java       | 125 ++++++++++++++++++
 .../server/upgrade/UpgradeCatalog230.java       |   2 +-
 .../server/upgrade/UpgradeCatalog240.java       |  26 ++--
 .../src/main/package/deb/control/postinst       |   6 +-
 .../src/main/package/deb/control/prerm          |   4 +-
 .../src/main/package/rpm/postinstall.sh         |   6 +-
 ambari-server/src/main/package/rpm/preremove.sh |   4 +-
 .../python/ambari_server/serverConfiguration.py |   5 +-
 .../main/python/ambari_server/setupSecurity.py  |   5 +-
 .../HBASE/0.96.0.2.0/alerts.json                |  37 ------
 .../OOZIE/4.0.0.2.0/configuration/oozie-env.xml |  10 ++
 .../OOZIE/4.0.0.2.0/package/scripts/oozie.py    |  14 ++
 .../4.0.0.2.0/package/scripts/params_linux.py   |   5 +
 .../4.0.0.2.0/package/templates/oozie.conf.j2   |  35 +++++
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |   3 +
 .../utilities/KerberosCheckerTest.java          |  88 +++++++++++++
 .../server/upgrade/UpgradeCatalog222Test.java   | 104 +++++++++++++++
 .../server/upgrade/UpgradeCatalog230Test.java   |   2 +-
 .../stacks/2.0.6/OOZIE/test_oozie_client.py     |  33 +++++
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     |  44 +++++++
 ambari-web/app/controllers/main/host/details.js |   7 +-
 ambari-web/app/messages.js                      |   2 +
 ambari-web/app/models/host_component.js         |   6 +
 ambari-web/app/styles/application.less          |   3 +
 .../main/host/details/host_component.hbs        |   3 +-
 ambari-web/app/utils/ember_computed.js          |   2 +-
 .../main/host/details/host_component_view.js    |  17 +++
 .../test/controllers/main/host/details_test.js  |  10 ++
 36 files changed, 713 insertions(+), 68 deletions(-)
----------------------------------------------------------------------



[15/50] [abbrv] ambari git commit: AMBARI-14797. Register Version: display loaded version definition info in different categories and Save them.(XIWANG)

Posted by nc...@apache.org.
AMBARI-14797. Register Version: display loaded version definition info in different categories and Save them.(XIWANG)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0b475640
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0b475640
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0b475640

Branch: refs/heads/trunk
Commit: 0b47564002b295efb6be1b41aa66381561dbe7e4
Parents: e4d03b5
Author: Xi Wang <xi...@apache.org>
Authored: Wed Feb 3 15:58:18 2016 -0800
Committer: Xi Wang <xi...@apache.org>
Committed: Wed Feb 3 16:02:48 2016 -0800

----------------------------------------------------------------------
 .../app/assets/data/version/version.json        |   3 +
 .../stackVersions/StackVersionsCreateCtrl.js    |  46 ++---
 .../ui/admin-web/app/scripts/i18n.config.js     |  12 +-
 .../ui/admin-web/app/scripts/services/Stack.js  | 170 ++++++++++++++++++-
 .../resources/ui/admin-web/app/styles/main.css  |  15 ++
 .../views/stackVersions/stackVersionPage.html   |  87 +++++-----
 6 files changed, 269 insertions(+), 64 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0b475640/ambari-admin/src/main/resources/ui/admin-web/app/assets/data/version/version.json
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/assets/data/version/version.json b/ambari-admin/src/main/resources/ui/admin-web/app/assets/data/version/version.json
index b5b3601..d6530e2 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/assets/data/version/version.json
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/assets/data/version/version.json
@@ -26,6 +26,7 @@
             "services": [
               {
                 "name": "HDFS",
+                "display_name": "HDFS",
                 "versions": [
                   {
                     "version": "2.1.1",
@@ -36,6 +37,7 @@
               },
               {
                 "name": "HIVE",
+                "display_name": "Hive",
                 "versions": [
                   {
                     "version": "1.2.1"
@@ -44,6 +46,7 @@
               },
               {
                 "name": "ZOOKEEPER",
+                "display_name": "ZooKeeper",
                 "versions": [
                   {
                     "version": "3.4.5"

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b475640/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
index 40aa103..81ad7f7 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
@@ -156,14 +156,14 @@ angular.module('ambariAdminConsole')
     index: 1,
     displayName: 'Upload Version Definition File',
     url: 'files://',
-    selected: true,
-    hasError: true
+    //selected: true,
+    hasError: false
   };
   $scope.option2 = {
     index: 2,
     displayName: 'Version Definition File URL',
     url: 'https://',
-    selected: false,
+    //selected: false,
     hasError: false
   };
   $scope.selectedOption = 1;
@@ -172,8 +172,8 @@ angular.module('ambariAdminConsole')
    * User can select ONLY one option to upload version definition file
    */
   $scope.toggleOptionSelect = function () {
-    $scope.option1.selected = $scope.selectedOption == $scope.option1.index;
-    $scope.option2.selected = $scope.selectedOption == $scope.option2.index;
+    //$scope.option1.selected = $scope.selectedOption == $scope.option1.index;
+    //$scope.option2.selected = $scope.selectedOption == $scope.option2.index;
     $scope.option1.hasError = false;
     $scope.option2.hasError = false;
   };
@@ -215,11 +215,11 @@ angular.module('ambariAdminConsole')
     return Stack.getLatestRepo('HDP').then(function (response) {
       $scope.id = response.id;
       $scope.isPatch = response.type == 'PATCH';
-      $scope.stackNameVersion = response.stackNameVersion;
-      $scope.displayName = response.displayName;
-      $scope.version = response.version;
-      $scope.actualVersion = response.actualVersion;
-      $scope.services = response.services;
+      $scope.stackNameVersion = response.stackNameVersion || 'n/a';
+      $scope.displayName = response.displayName || 'n/a';
+      $scope.version = response.version || 'n/a';
+      $scope.actualVersion = response.actualVersion || 'n/a';
+      $scope.services = response.services || [];
       //save default values of repos to check if they were changed
       $scope.defaulfOSRepos = {};
       response.updateObj.operating_systems.forEach(function(os) {
@@ -236,19 +236,19 @@ angular.module('ambariAdminConsole')
       $scope.osList = response.osList;
       // if user reach here from UI click, repo status should be cached
       // otherwise re-fetch repo status from cluster end point.
-      $scope.repoStatus = Cluster.repoStatusCache[$scope.id];
-      if (!$scope.repoStatus) {
-        $scope.fetchClusters()
-          .then(function () {
-            return $scope.fetchRepoClusterStatus();
-          })
-          .then(function () {
-            $scope.deleteEnabled = $scope.isDeletable();
-          });
-      } else {
-        $scope.deleteEnabled = $scope.isDeletable();
-      }
-      $scope.addMissingOSList();
+//      $scope.repoStatus = Cluster.repoStatusCache[$scope.id];
+//      if (!$scope.repoStatus) {
+//        $scope.fetchClusters()
+//          .then(function () {
+//            return $scope.fetchRepoClusterStatus();
+//          })
+//          .then(function () {
+//            $scope.deleteEnabled = $scope.isDeletable();
+//          });
+//      } else {
+//        $scope.deleteEnabled = $scope.isDeletable();
+//      }
+      //$scope.addMissingOSList();
     });
   };
 }]);

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b475640/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
index 21622b5..ed42b45 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
@@ -306,7 +306,17 @@ angular.module('ambariAdminConsole')
       'os': 'OS',
       'baseURL': 'Base URL',
       'skipValidation': 'Skip Repository Base URL validation (Advanced)',
-
+      'contents': {
+        'title': 'Contents',
+        'empty': 'No contents to display'
+      },
+      'details': {
+        'stackName': 'Stack Name',
+        'displayName': 'Display Name',
+        'version': 'Version',
+        'actualVersion': 'Actual Version',
+        'releaseNotes': 'Release Notes'
+      },
       'changeBaseURLConfirmation': {
         'title': 'Confirm Base URL Change',
         'message': 'You are about to change repository Base URLs that are already in use. Please confirm that you intend to make this change and that the new Base URLs point to the same exact Stack version and build'

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b475640/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
index a28943c..aa81b5d 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
@@ -176,11 +176,176 @@ angular.module('ambariAdminConsole')
     getLatestRepo: function (stack_name) {
       var url = Settings.baseUrl + '/stacks/' + stack_name + '/versions?' +
         'fields=repository_versions/operating_systems/repositories/*' +
-        ',repository_versions/RepositoryVersions/*';
+        ',repository_versions/RepositoryVersions/*';  // tbd
       var deferred = $q.defer();
       $http.get(url, {mock: 'version/version.json'})
         .success(function (data) {
-          data = data.items[0];
+          //data = data.items[0];
+          data = {
+            "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2",
+            "Versions" : {
+              "stack_name" : "HDP",
+              "stack_version" : "2.2"
+            },
+            "repository_versions" : [
+              {
+                "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15",
+                "RepositoryVersions" : {
+                  "id" : 15,
+                  "repository_version" : "2.2.0.1-901",
+                  "stack_name" : "HDP",
+                  "stack_version" : "2.2",
+                  "release": {
+                    "type": "PATCH",
+                    "stack_id": "HDP-2.2",
+                    "version": "2.2.0.1",
+                    "build": "901",
+                    "compatible_with": "2.2.0.1-[1-9]",
+                    "release_notes": "http://someurl"
+                  },
+                  "services": [
+                    {
+                      "name": "HDFS",
+                      "display_name": "HDFS",
+                      "versions": [
+                        {
+                          "version": "2.1.1",
+                          "version_id": "10",
+                          "components": [ "NAMENODE"]
+                        }
+                      ]
+                    },
+                    {
+                      "name": "HIVE",
+                      "display_name": "Hive",
+                      "versions": [
+                        {
+                          "version": "1.2.1"
+                        }
+                      ]
+                    },
+                    {
+                      "name": "ZOOKEEPER",
+                      "display_name": "ZooKeeper",
+                      "versions": [
+                        {
+                          "version": "3.4.5"
+                        }
+                      ]
+                    },
+                    {
+                      "name": "YARN",
+                      "versions": [
+                        {
+                          "version": "2.7.1"
+                        }
+                      ]
+                    },
+                    {
+                      "name": "SPARK",
+                      "versions": [
+                        {
+                          "version": "1.4.1"
+                        }
+                      ]
+                    },
+                    {
+                      "name": "SPARK",
+                      "versions": [
+                        {
+                          "version": "1.5.2"
+                        }
+                      ]
+                    }
+                  ]
+                },
+                "operating_systems" : [
+                  {
+                    "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15/operating_systems/redhat5",
+                    "OperatingSystems" : {
+                      "os_type" : "redhat5",
+                      "repository_version_id" : 15,
+                      "stack_name" : "HDP",
+                      "stack_version" : "2.2"
+                    },
+                    "repositories" : [
+                      {
+                        "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15/operating_systems/redhat5/repositories/HDP-2.2.0.1-901",
+                        "Repositories" : {
+                          "base_url" : "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0",
+                          "default_base_url" : "",
+                          "latest_base_url" : "",
+                          "mirrors_list" : "",
+                          "os_type" : "redhat5",
+                          "repo_id" : "HDP-2.2.0.1-901",
+                          "repo_name" : "HDP",
+                          "repository_version_id" : 15,
+                          "stack_name" : "HDP",
+                          "stack_version" : "2.2"
+                        }
+                      },
+                      {
+                        "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15/operating_systems/redhat5/repositories/HDP-UTILS-2.2.0.1-901",
+                        "Repositories" : {
+                          "base_url" : "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0",
+                          "default_base_url" : "",
+                          "latest_base_url" : "",
+                          "mirrors_list" : "",
+                          "os_type" : "redhat5",
+                          "repo_id" : "HDP-UTILS-2.2.0.1-901",
+                          "repo_name" : "HDP-UTILS",
+                          "repository_version_id" : 15,
+                          "stack_name" : "HDP",
+                          "stack_version" : "2.2"
+                        }
+                      }
+                    ]
+                  },
+                  {
+                    "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15/operating_systems/redhat6",
+                    "OperatingSystems" : {
+                      "os_type" : "redhat6",
+                      "repository_version_id" : 15,
+                      "stack_name" : "HDP",
+                      "stack_version" : "2.2"
+                    },
+                    "repositories" : [
+                      {
+                        "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15/operating_systems/redhat6/repositories/HDP-2.2.0.1-901",
+                        "Repositories" : {
+                          "base_url" : "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0",
+                          "default_base_url" : "",
+                          "latest_base_url" : "",
+                          "mirrors_list" : "",
+                          "os_type" : "redhat6",
+                          "repo_id" : "HDP-2.2.0.1-901",
+                          "repo_name" : "HDP",
+                          "repository_version_id" : 15,
+                          "stack_name" : "HDP",
+                          "stack_version" : "2.2"
+                        }
+                      },
+                      {
+                        "href" : "http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.2/repository_versions/15/operating_systems/redhat6/repositories/HDP-UTILS-2.2.0.1-901",
+                        "Repositories" : {
+                          "base_url" : "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.2.0.0",
+                          "default_base_url" : "",
+                          "latest_base_url" : "",
+                          "mirrors_list" : "",
+                          "os_type" : "redhat6",
+                          "repo_id" : "HDP-UTILS-2.2.0.1-901",
+                          "repo_name" : "HDP-UTILS",
+                          "repository_version_id" : 15,
+                          "stack_name" : "HDP",
+                          "stack_version" : "2.2"
+                        }
+                      }
+                    ]
+                  }
+                ]
+              }
+            ]
+          };
           var response = {
             id : data.repository_versions[0].RepositoryVersions.id,
             stackVersion : data.Versions.stack_version,
@@ -189,6 +354,7 @@ angular.module('ambariAdminConsole')
             stackNameVersion: data.Versions.stack_name + '-' + data.Versions.stack_version, /// HDP-2.3
             actualVersion: data.repository_versions[0].RepositoryVersions.repository_version, /// 2.3.4.0-3846
             version: data.repository_versions[0].RepositoryVersions.release.version, /// 2.3.4.0
+            releaseNotes: data.repository_versions[0].RepositoryVersions.release.release_notes,
             displayName: data.Versions.stack_name + '-' + data.repository_versions[0].RepositoryVersions.release.version, //HDP-2.3.4.0
             repoVersionFullName : data.Versions.stack_name + '-' + data.repository_versions[0].RepositoryVersions.repository_version,
             osList: data.repository_versions[0].operating_systems,

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b475640/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
index de3d526..8385e48 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
@@ -1398,3 +1398,18 @@ thead.view-permission-header > tr > th {
   padding-bottom: 20px;
 }
 
+.register-version-form .patch-icon {
+  color: #ff4500;
+}
+
+.register-version-form .version-info {
+  padding-top: 7px;
+  margin-top: 0;
+  margin-bottom: 0;
+}
+
+.version-contents-body {
+  max-height: 150px;
+  overflow: scroll;
+}
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/0b475640/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
index e23aecb..488f077 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
@@ -31,58 +31,69 @@
 <hr>
 
 
-
-<div class="clearfix register-version-options">
-  <div class="col-sm-5 option-radio-button">
-    <label class="option-label">
-      <input type="radio" ng-model="selectedOption" value="{{option1.index}}" ng-change="toggleOptionSelect()"> {{'versions.uploadFile' | translate}}
-    </label>
-  </div>
-  <div class="col-sm-7">
-    <input type="file" class="choose-file-input" ng-file-select="onFileSelect($files)"/>
-  </div>
-</div>
-<div class="clearfix register-version-options border-bottom bottom-margin">
-  <div class="col-sm-5 option-radio-button">
-    <label class="option-label">
-      <input type="radio" ng-model="selectedOption" value="{{option2.index}}" ng-change="toggleOptionSelect()"> {{'versions.enterURL' | translate}}
-    </label>
+<div id="upload-definition-file-panel" ng-if="createController">
+  <div class="clearfix register-version-options">
+    <div class="col-sm-5 option-radio-button">
+      <label class="option-label">
+        <input type="radio" ng-model="selectedOption" value="{{option1.index}}" ng-change="toggleOptionSelect()"> {{'versions.uploadFile' | translate}}
+      </label>
+    </div>
+    <div class="col-sm-7">
+      <input type="file" class="choose-file-input" ng-file-select="onFileSelect($files)"/>
+    </div>
   </div>
+  <div class="clearfix register-version-options border-bottom bottom-margin">
+    <div class="col-sm-5 option-radio-button">
+      <label class="option-label">
+        <input type="radio" ng-model="selectedOption" value="{{option2.index}}" ng-change="toggleOptionSelect()"> {{'versions.enterURL' | translate}}
+      </label>
+    </div>
     <div class="col-sm-7">
       <div class="form-group {{option2.name}}" ng-class="{'has-error': option2.url.hasError }">
-        <div class=""><input type="text" class="form-control" ng-model="option2.url" ng-change="clearOptionsError()" ng-disabled="!option2.selected"></div>
+        <div class=""><input type="text" class="form-control" ng-model="option2.url" ng-change="clearOptionsError()" ng-disabled="!(selectedOption==2)"></div>
       </div>
     </div>
     <div class="col-sm-12 read-info-button">
-    <button class="btn btn-primary pull-right" ng-click="readVersionInfo()"
-        ng-disabled="readInfoButtonDisabled()">{{'versions.readInfo' | translate}}</button>
+      <button class="btn btn-primary pull-right" ng-click="readVersionInfo()"
+            ng-disabled="readInfoButtonDisabled()">{{'versions.readInfo' | translate}}</button>
     </div>
+  </div>
 </div>
 
-
 <form class="form-horizontal register-version-form" role="form" name="repoRegForm" novalidate>
-  <div class="panel panel-default" ng-if="createController">
+  <div class="panel panel-default">
     <div class="panel-heading">
       <h3 class="panel-title">{{'common.details' | translate}}</h3>
     </div>
     <div class="panel-body">
-      <div class="form-inline repo-version-inline">
-        <label class="control-label col-sm-2 repo-version-label">{{'common.name' | translate}}</label>
-        <div class="col-sm-10">
-          <select class="form-control repo-version-select"
-                  ng-model="$parent.upgradeStack.selected"
-                  ng-options="o as o.displayName for o in upgradeStack.options"
-                  ng-change="afterStackVersionChange()">
-          </select>
-          <span class="bold-dot">.</span>
-          <div class="form-group" ng-class="{'has-error' : repoRegForm.version.$error.pattern}">
-            <input class="form-control" name="version" type="text" ng-model="$parent.repoSubversion" ng-pattern="subversionPattern"
-                   placeholder="{{'versions.placeholder' | translate}}" ng-change="updateCurrentVersionInput()" required/>
-            <span class="text-danger" ng-show="repoRegForm.version.$error.pattern">
-              &nbsp{{'common.alerts.isInvalid' | translate: '{term: currentVersionInput}'}}
-            </span>
-          </div>
-        </div>
+      <div class="clearfix">
+        <label class="control-label col-sm-3">{{'versions.details.stackName' | translate}}</label>
+        <div class="version-info col-sm-7">{{stackNameVersion}}</div>
+        <div class="col-sm-2 patch-icon" ng-if="isPatch"><span class="glyphicon glyphicon-tree-deciduous">Patch</span></div>
+      </div>
+      <div class="clearfix">
+        <label class="control-label col-sm-3">{{'versions.details.displayName' | translate}}</label>
+        <div class="version-info col-sm-9">{{displayName}}</div>
+      </div>
+      <div class="clearfix">
+        <label class="control-label col-sm-3">{{'versions.details.version' | translate}}</label>
+        <div class="version-info col-sm-9">{{version}}</div>
+      </div>
+      <div class="clearfix">
+        <label class="control-label col-sm-3">{{'versions.details.actualVersion' | translate}}</label>
+        <div class="version-info col-sm-9">{{actualVersion}}</div>
+      </div>
+    </div>
+  </div>
+  <div class="panel panel-default">
+    <div class="panel-heading">
+      <h3 class="panel-title">{{'versions.contents.title' | translate}}</h3>
+    </div>
+    <div class="panel-body version-contents-body">
+      <div class="alert alert-info hide-soft" ng-class="{'visible' : !services || !services.length}" role="alert">{{'versions.contents.empty' | translate}}</div>
+      <div class="clearfix" ng-repeat="service in services">
+        <label class="control-label col-sm-3">{{service.name}}</label>
+        <div class="version-info col-sm-9">{{service.version}}</div>
       </div>
     </div>
   </div>


[33/50] [abbrv] ambari git commit: AMBARI-15094 - Patch Upgrade UpgradeCatalog Changes (jonathanhurley)

Posted by nc...@apache.org.
AMBARI-15094 - Patch Upgrade UpgradeCatalog Changes (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4f78af7a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4f78af7a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4f78af7a

Branch: refs/heads/trunk
Commit: 4f78af7ae47564fdc6a2c013cf1757f19005e6a5
Parents: 9873e69
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Thu Feb 18 13:23:43 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Fri Feb 19 11:11:30 2016 -0500

----------------------------------------------------------------------
 .../apache/ambari/server/orm/DBAccessor.java    |  16 +-
 .../ambari/server/orm/DBAccessorImpl.java       |  86 ++++++-
 .../server/orm/helpers/dbms/DbmsHelper.java     |  12 +-
 .../orm/helpers/dbms/GenericDbmsHelper.java     |  10 +
 .../server/upgrade/UpgradeCatalog240.java       | 225 ++++++++++++++++---
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   1 +
 .../server/upgrade/UpgradeCatalog240Test.java   | 119 +++++++---
 7 files changed, 397 insertions(+), 72 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4f78af7a/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessor.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessor.java
index 49f108b..8142661 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessor.java
@@ -545,7 +545,9 @@ public interface DBAccessor {
   /**
    * Queries the database to determine the name of the primary key constraint on
    * the specified table. Currently, this is only implemented for
-   * {@link DatabaseType#ORACLE} and {@link DatabaseType#SQL_SERVER}.
+   * {@link DatabaseType#POSTGRES}, {@link DatabaseType#ORACLE} and
+   * {@link DatabaseType#SQL_SERVER}. {@link DatabaseType#MYSQL} does not need
+   * this since PKs can be dropped without referencing their name.
    *
    * @param tableName
    *          the name of the table to lookup the PK constraint.
@@ -554,6 +556,18 @@ public interface DBAccessor {
    */
   String getPrimaryKeyConstraintName(String tableName) throws SQLException;
 
+  /**
+   * Attempts to drop the discovered PRIMARY KEY constraint on the specified
+   * table, defaulting to the specified default if not found.
+   *
+   * @param tableName
+   *          the table to drop the PK from (not {@code null}).
+   * @param defaultConstraintName
+   *          the default name of the PK constraint if none is found.
+   * @throws SQLException
+   */
+  void dropPKConstraint(String tableName, String defaultConstraintName) throws SQLException;
+
   enum DbType {
     ORACLE,
     MYSQL,

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f78af7a/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java
index abd05bc..329fea8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java
@@ -31,12 +31,12 @@ import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
 import java.sql.Statement;
 import java.sql.Types;
-import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
 import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.configuration.Configuration.DatabaseType;
 import org.apache.ambari.server.orm.helpers.ScriptRunner;
 import org.apache.ambari.server.orm.helpers.dbms.DbmsHelper;
 import org.apache.ambari.server.orm.helpers.dbms.DerbyHelper;
@@ -45,6 +45,7 @@ import org.apache.ambari.server.orm.helpers.dbms.MySqlHelper;
 import org.apache.ambari.server.orm.helpers.dbms.OracleHelper;
 import org.apache.ambari.server.orm.helpers.dbms.PostgresHelper;
 import org.apache.ambari.server.utils.CustomStringUtils;
+import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.lang.StringUtils;
 import org.eclipse.persistence.internal.helper.DBPlatformHelper;
 import org.eclipse.persistence.internal.sessions.DatabaseSessionImpl;
@@ -146,12 +147,19 @@ public class DBAccessorImpl implements DBAccessor {
 
   @Override
   public void createTable(String tableName, List<DBColumnInfo> columnInfo,
-          String... primaryKeyColumns) throws SQLException {
-    if (!tableExists(tableName)) {
-      String query = dbmsHelper.getCreateTableStatement(tableName, columnInfo, Arrays.asList(primaryKeyColumns));
-
-      executeQuery(query);
+      String... primaryKeyColumns) throws SQLException {
+    // do nothing if the table already exists
+    if (tableExists(tableName)) {
+      return;
     }
+
+    // guard against null PKs
+    primaryKeyColumns = ArrayUtils.nullToEmpty(primaryKeyColumns);
+
+    String query = dbmsHelper.getCreateTableStatement(tableName, columnInfo,
+        Arrays.asList(primaryKeyColumns));
+
+    executeQuery(query);
   }
 
   protected DatabaseMetaData getDatabaseMetaData() throws SQLException {
@@ -772,6 +780,14 @@ public class DBAccessorImpl implements DBAccessor {
     if (checkedConstraintName != null) {
       String query = dbmsHelper.getDropFKConstraintStatement(tableName, checkedConstraintName);
       executeQuery(query, ignoreFailure);
+
+      // MySQL also adds indexes in addition to the FK which should be dropped
+      Configuration.DatabaseType databaseType = configuration.getDatabaseType();
+      if (databaseType == DatabaseType.MYSQL) {
+        query = dbmsHelper.getDropIndexStatement(constraintName, tableName);
+        executeQuery(query, true);
+      }
+
     } else {
       LOG.warn("Constraint {} from {} table not found, nothing to drop", constraintName, tableName);
     }
@@ -1022,9 +1038,9 @@ public class DBAccessorImpl implements DBAccessor {
 
     switch (databaseType) {
       case ORACLE: {
-        String lookupPrimaryKeyNameSql = MessageFormat.format(
-            "SELECT constraint_name FROM all_constraints WHERE table_name = ''{0}'' AND constraint_type = ''P''",
-            tableName.toUpperCase());
+        String lookupPrimaryKeyNameSql = String.format(
+            "SELECT constraint_name FROM all_constraints WHERE UPPER(table_name) = UPPER('%s') AND constraint_type = 'P'",
+            tableName);
 
         try {
           statement = getConnection().createStatement();
@@ -1040,8 +1056,8 @@ public class DBAccessorImpl implements DBAccessor {
         break;
       }
       case SQL_SERVER: {
-        String lookupPrimaryKeyNameSql = MessageFormat.format(
-            "SELECT constraint_name FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE WHERE OBJECTPROPERTY(OBJECT_ID(constraint_name), 'IsPrimaryKey') = 1 AND table_name = {0}",
+        String lookupPrimaryKeyNameSql = String.format(
+            "SELECT constraint_name FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE WHERE OBJECTPROPERTY(OBJECT_ID(constraint_name), 'IsPrimaryKey') = 1 AND table_name = '%s'",
             tableName);
 
         try {
@@ -1055,6 +1071,25 @@ public class DBAccessorImpl implements DBAccessor {
           JdbcUtils.closeStatement(statement);
         }
 
+        break;
+      }
+      case POSTGRES: {
+        String lookupPrimaryKeyNameSql = String.format(
+            "SELECT constraint_name FROM information_schema.table_constraints AS tc WHERE tc.constraint_type = 'PRIMARY KEY' AND table_name = '%s'",
+            tableName);
+
+        try {
+          statement = getConnection().createStatement();
+          resultSet = statement.executeQuery(lookupPrimaryKeyNameSql);
+          if (resultSet.next()) {
+            primaryKeyConstraintName = resultSet.getString("constraint_name");
+          }
+        } finally {
+          JdbcUtils.closeResultSet(resultSet);
+          JdbcUtils.closeStatement(statement);
+        }
+
+        break;
       }
       default:
         break;
@@ -1063,4 +1098,33 @@ public class DBAccessorImpl implements DBAccessor {
     return primaryKeyConstraintName;
   }
 
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public void dropPKConstraint(String tableName, String defaultConstraintName) throws SQLException {
+    Configuration.DatabaseType databaseType = configuration.getDatabaseType();
+
+    // drop the PK directly if MySQL since it supports it
+    if (databaseType == DatabaseType.MYSQL) {
+      String mysqlDropQuery = String.format("ALTER TABLE %s DROP PRIMARY KEY", tableName);
+      executeQuery(mysqlDropQuery, true);
+      return;
+    }
+
+    // discover the PK name, using the default if none found
+    String primaryKeyConstraintName = getPrimaryKeyConstraintName(tableName);
+    if (null == primaryKeyConstraintName) {
+      primaryKeyConstraintName = defaultConstraintName;
+      LOG.warn("Unable to dynamically determine the PK constraint name for {}, defaulting to {}",
+          tableName, defaultConstraintName);
+    }
+
+    // warn if we can't find it
+    if (null == primaryKeyConstraintName) {
+      LOG.warn("Unable to determine the primary key constraint name for {}", tableName);
+    } else {
+      dropPKConstraint(tableName, primaryKeyConstraintName, true);
+    }
+  }
 }
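
A minimal standalone sketch (not part of this commit) of the statement selection that dropPKConstraint() performs above: MySQL drops the primary key without naming it, while the other databases first discover the constraint name (falling back to the caller-supplied default) and then drop it by name. The DROP CONSTRAINT rendering below is an assumption for illustration; the real statement for the named-constraint path is produced by the per-DBMS helper through the existing dropPKConstraint(tableName, constraintName, ignoreFailure) overload.

public class PkDropSketch {
  // Hypothetical enum standing in for Configuration.DatabaseType, for illustration only.
  enum DatabaseType { MYSQL, ORACLE, SQL_SERVER, POSTGRES, DERBY }

  static String dropPkSql(DatabaseType type, String table, String discoveredOrDefaultPkName) {
    if (type == DatabaseType.MYSQL) {
      // MySQL can drop a primary key without knowing its constraint name.
      return String.format("ALTER TABLE %s DROP PRIMARY KEY", table);
    }
    // Other databases need the constraint name: discovered from the catalog, or the
    // supplied default if discovery returned nothing. Exact DDL varies by platform.
    return String.format("ALTER TABLE %s DROP CONSTRAINT %s", table, discoveredOrDefaultPkName);
  }

  public static void main(String[] args) {
    System.out.println(dropPkSql(DatabaseType.MYSQL, "servicecomponentdesiredstate", null));
    System.out.println(dropPkSql(DatabaseType.POSTGRES, "servicecomponentdesiredstate",
        "servicecomponentdesiredstate_pkey"));
  }
}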

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f78af7a/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/DbmsHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/DbmsHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/DbmsHelper.java
index cdc1b4a..30c06fb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/DbmsHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/DbmsHelper.java
@@ -59,6 +59,16 @@ public interface DbmsHelper {
                                  String... columnNames);
 
   /**
+   * Gets the DROP INDEX statement for the given index on the given table.
+   *
+   * @param indexName the name of the index to drop
+   * @param tableName the name of the table that owns the index
+   *
+   * @return the DROP INDEX statement
+   */
+  String getDropIndexStatement(String indexName, String tableName);
+
+  /**
    * Generate alter table statement to add unique constraint
    * @param tableName name of the table
    * @param constraintName name of the constraint
@@ -102,7 +112,7 @@ public interface DbmsHelper {
 
   /**
    * Gets the {@code SET NULL} or {@code SET NOT NULL} statement.
-   * 
+   *
    * @param tableName
    *          the table (not {@code null}).
    * @param columnInfo

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f78af7a/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/GenericDbmsHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/GenericDbmsHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/GenericDbmsHelper.java
index 8dfb8ba..21fa361 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/GenericDbmsHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/GenericDbmsHelper.java
@@ -243,6 +243,16 @@ public class GenericDbmsHelper implements DbmsHelper {
     return createIndex;
   }
 
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public String getDropIndexStatement(String indexName, String tableName) {
+    String dropIndex = databasePlatform.buildDropIndex(tableName, indexName);
+    return dropIndex;
+  }
+
   @Override
   public String getAddUniqueConstraintStatement(String tableName, String constraintName, String... columnNames){
     UniqueKeyConstraint uniqueKeyConstraint = new UniqueKeyConstraint();

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f78af7a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
index d97962f..09f31e4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
@@ -18,17 +18,21 @@
 
 package org.apache.ambari.server.upgrade;
 
-import com.google.common.collect.Lists;
-import com.google.gson.JsonArray;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import com.google.gson.JsonParser;
-import com.google.gson.JsonPrimitive;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
+import java.sql.Clob;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.atomic.AtomicLong;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.controller.AmbariManagementController;
-import org.apache.ambari.server.orm.DBAccessor;
+import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
 import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
 import org.apache.ambari.server.orm.dao.DaoUtils;
 import org.apache.ambari.server.orm.dao.PermissionDAO;
@@ -37,19 +41,20 @@ import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
 import org.apache.ambari.server.orm.entities.PermissionEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.RepositoryType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.springframework.jdbc.support.JdbcUtils;
 
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
+import com.google.common.collect.Lists;
+import com.google.gson.JsonArray;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
+import com.google.gson.JsonPrimitive;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import com.google.inject.persist.Transactional;
 
 /**
  * Upgrade catalog for version 2.4.0.
@@ -59,6 +64,13 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   protected static final String ADMIN_PERMISSION_TABLE = "adminpermission";
   protected static final String PERMISSION_ID_COL = "permission_name";
   protected static final String SORT_ORDER_COL = "sort_order";
+  protected static final String REPO_VERSION_TABLE = "repo_version";
+  protected static final String SERVICE_COMPONENT_DS_TABLE = "servicecomponentdesiredstate";
+  protected static final String HOST_COMPONENT_DS_TABLE = "hostcomponentdesiredstate";
+  protected static final String HOST_COMPONENT_STATE_TABLE = "hostcomponentstate";
+  protected static final String SERVICE_COMPONENT_HISTORY_TABLE = "servicecomponent_history";
+  protected static final String UPGRADE_TABLE = "upgrade";
+  protected static final String STACK_TABLE = "stack";
 
   @Inject
   DaoUtils daoUtils;
@@ -116,6 +128,9 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   protected void executeDDLUpdates() throws AmbariException, SQLException {
     updateAdminPermissionTable();
     createSettingTable();
+    updateRepoVersionTableDDL();
+    updateServiceComponentDesiredStateTableDDL();
+    createServiceComponentHistoryTable();
   }
 
   @Override
@@ -132,17 +147,17 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   }
 
   private void createSettingTable() throws SQLException {
-    List<DBAccessor.DBColumnInfo> columns = new ArrayList<>();
+    List<DBColumnInfo> columns = new ArrayList<>();
 
     //  Add setting table
     LOG.info("Creating " + SETTING_TABLE + " table");
 
-    columns.add(new DBAccessor.DBColumnInfo(ID, Long.class, null, null, false));
-    columns.add(new DBAccessor.DBColumnInfo("name", String.class, 255, null, false));
-    columns.add(new DBAccessor.DBColumnInfo("setting_type", String.class, 255, null, false));
-    columns.add(new DBAccessor.DBColumnInfo("content", String.class, 3000, null, false));
-    columns.add(new DBAccessor.DBColumnInfo("updated_by", String.class, 255, "_db", false));
-    columns.add(new DBAccessor.DBColumnInfo("update_timestamp", Long.class, null, null, false));
+    columns.add(new DBColumnInfo(ID, Long.class, null, null, false));
+    columns.add(new DBColumnInfo("name", String.class, 255, null, false));
+    columns.add(new DBColumnInfo("setting_type", String.class, 255, null, false));
+    columns.add(new DBColumnInfo("content", String.class, 3000, null, false));
+    columns.add(new DBColumnInfo("updated_by", String.class, 255, "_db", false));
+    columns.add(new DBColumnInfo("update_timestamp", Long.class, null, null, false));
     dbAccessor.createTable(SETTING_TABLE, columns, ID);
     addSequence("setting_id_seq", 0L, false);
   }
@@ -368,7 +383,8 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
 
   protected void updateAdminPermissionTable() throws SQLException {
     // Add the sort_order column to the adminpermission table
-    dbAccessor.addColumn(ADMIN_PERMISSION_TABLE, new DBAccessor.DBColumnInfo(SORT_ORDER_COL, Short.class, null, 1, false));
+    dbAccessor.addColumn(ADMIN_PERMISSION_TABLE,
+        new DBColumnInfo(SORT_ORDER_COL, Short.class, null, 1, false));
   }
 
   protected void setRoleSortOrder() throws SQLException {
@@ -391,4 +407,159 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
         7, PermissionEntity.VIEW_USER_PERMISSION_NAME));
   }
 
+  /**
+   * Makes the following changes to the {@value #REPO_VERSION_TABLE} table:
+   * <ul>
+   * <li>repo_type VARCHAR(255) DEFAULT 'STANDARD' NOT NULL</li>
+   * <li>version_url VARCHAR(1024)</li>
+   * <li>version_xml MEDIUMTEXT</li>
+   * <li>version_xsd VARCHAR(512)</li>
+   * <li>parent_id BIGINT</li>
+   * </ul>
+   *
+   * @throws SQLException
+   */
+  private void updateRepoVersionTableDDL() throws SQLException {
+    DBColumnInfo repoTypeColumn = new DBColumnInfo("repo_type", String.class, 255, RepositoryType.STANDARD.name(), false);
+    DBColumnInfo versionUrlColumn = new DBColumnInfo("version_url", String.class, 1024, null, true);
+    DBColumnInfo versionXmlColumn = new DBColumnInfo("version_xml", Clob.class, null, null, true);
+    DBColumnInfo versionXsdColumn = new DBColumnInfo("version_xsd", String.class, 512, null, true);
+    DBColumnInfo parentIdColumn = new DBColumnInfo("parent_id", Long.class, null, null, true);
+
+    dbAccessor.addColumn(REPO_VERSION_TABLE, repoTypeColumn);
+    dbAccessor.addColumn(REPO_VERSION_TABLE, versionUrlColumn);
+    dbAccessor.addColumn(REPO_VERSION_TABLE, versionXmlColumn);
+    dbAccessor.addColumn(REPO_VERSION_TABLE, versionXsdColumn);
+    dbAccessor.addColumn(REPO_VERSION_TABLE, parentIdColumn);
+  }
+
+  /**
+   * Makes the following changes to the {@value #SERVICE_COMPONENT_DS_TABLE} table,
+   * but only if the table doesn't have its new PK set.
+   * <ul>
+   * <li>id BIGINT NOT NULL</li>
+   * <li>Drops FKs on {@value #HOST_COMPONENT_DS_TABLE} and {@value #HOST_COMPONENT_STATE_TABLE}</li>
+   * <li>Populates the new {@code id} column in {@value #SERVICE_COMPONENT_DS_TABLE}</li>
+   * <li>Creates a {@code UNIQUE} constraint on {@value #SERVICE_COMPONENT_DS_TABLE}</li>
+   * <li>Adds FKs on {@value #HOST_COMPONENT_DS_TABLE} and {@value #HOST_COMPONENT_STATE_TABLE}</li>
+   * <li>Adds new sequence value of {@code servicecomponentdesiredstate_id_seq}</li>
+   * </ul>
+   *
+   * @throws SQLException
+   */
+  @Transactional
+  private void updateServiceComponentDesiredStateTableDDL() throws SQLException {
+    if (dbAccessor.tableHasPrimaryKey(SERVICE_COMPONENT_DS_TABLE, ID)) {
+      LOG.info("Skipping {} table Primary Key modifications since the new {} column already exists",
+          SERVICE_COMPONENT_DS_TABLE, ID);
+
+      return;
+    }
+
+    // drop FKs to SCDS in both HCDS and HCS tables
+    dbAccessor.dropFKConstraint(HOST_COMPONENT_DS_TABLE, "hstcmpnntdesiredstatecmpnntnme");
+    dbAccessor.dropFKConstraint(HOST_COMPONENT_STATE_TABLE, "hstcomponentstatecomponentname");
+
+    // remove existing compound PK
+    dbAccessor.dropPKConstraint(SERVICE_COMPONENT_DS_TABLE, "servicecomponentdesiredstate_pkey");
+
+    // add new PK column to SCDS, making it nullable for now
+    DBColumnInfo idColumn = new DBColumnInfo(ID, Long.class, null, null, true);
+    dbAccessor.addColumn(SERVICE_COMPONENT_DS_TABLE, idColumn);
+
+    // populate SCDS id column
+    AtomicLong scdsIdCounter = new AtomicLong(1);
+    Statement statement = null;
+    ResultSet resultSet = null;
+    try {
+      statement = dbAccessor.getConnection().createStatement();
+      if (statement != null) {
+        String selectSQL = String.format("SELECT cluster_id, service_name, component_name FROM %s",
+            SERVICE_COMPONENT_DS_TABLE);
+
+        resultSet = statement.executeQuery(selectSQL);
+        while (null != resultSet && resultSet.next()) {
+          final Long clusterId = resultSet.getLong("cluster_id");
+          final String serviceName = resultSet.getString("service_name");
+          final String componentName = resultSet.getString("component_name");
+
+          String updateSQL = String.format(
+              "UPDATE %s SET %s = %d WHERE cluster_id = %d AND service_name = '%s' AND component_name = '%s'",
+              SERVICE_COMPONENT_DS_TABLE, ID, scdsIdCounter.getAndIncrement(), clusterId,
+              serviceName, componentName);
+
+          dbAccessor.executeQuery(updateSQL);
+        }
+      }
+    } finally {
+      JdbcUtils.closeResultSet(resultSet);
+      JdbcUtils.closeStatement(statement);
+    }
+
+    // make the column NON NULL now
+    dbAccessor.alterColumn(SERVICE_COMPONENT_DS_TABLE,
+        new DBColumnInfo(ID, Long.class, null, null, false));
+
+    // create a new PK, matching the name of the constraint found in SQL
+    dbAccessor.addPKConstraint(SERVICE_COMPONENT_DS_TABLE, "pk_sc_desiredstate", ID);
+
+    // create UNIQUE constraint, ensuring column order matches SQL files
+    String[] uniqueColumns = new String[] { "component_name", "service_name", "cluster_id" };
+    dbAccessor.addUniqueConstraint(SERVICE_COMPONENT_DS_TABLE, "unq_scdesiredstate_name",
+        uniqueColumns);
+
+    // add FKs back to SCDS in both HCDS and HCS tables
+    dbAccessor.addFKConstraint(HOST_COMPONENT_DS_TABLE, "hstcmpnntdesiredstatecmpnntnme",
+        uniqueColumns, SERVICE_COMPONENT_DS_TABLE, uniqueColumns, false);
+
+    dbAccessor.addFKConstraint(HOST_COMPONENT_STATE_TABLE, "hstcomponentstatecomponentname",
+        uniqueColumns, SERVICE_COMPONENT_DS_TABLE, uniqueColumns, false);
+
+    // Add sequence for SCDS id
+    addSequence("servicecomponentdesiredstate_id_seq", scdsIdCounter.get(), false);
+  }
+
+  /**
+   * Makes the following changes to the {@value #SERVICE_COMPONENT_HISTORY_TABLE} table:
+   * <ul>
+   * <li>id BIGINT NOT NULL</li>
+   * <li>component_id BIGINT NOT NULL</li>
+   * <li>upgrade_id BIGINT NOT NULL</li>
+   * <li>from_stack_id BIGINT NOT NULL</li>
+   * <li>to_stack_id BIGINT NOT NULL</li>
+   * <li>CONSTRAINT PK_sc_history PRIMARY KEY (id)</li>
+   * <li>CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id)</li>
+   * <li>CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id)</li>
+   * <li>CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id)</li>
+   * <li>CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)</li>
+   * <li>Creates the {@code servicecomponent_history_id_seq}</li>
+   * </ul>
+   *
+   * @throws SQLException
+   */
+  private void createServiceComponentHistoryTable() throws SQLException {
+    List<DBColumnInfo> columns = new ArrayList<>();
+    columns.add(new DBColumnInfo(ID, Long.class, null, null, false));
+    columns.add(new DBColumnInfo("component_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("upgrade_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("from_stack_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("to_stack_id", Long.class, null, null, false));
+    dbAccessor.createTable(SERVICE_COMPONENT_HISTORY_TABLE, columns, (String[]) null);
+
+    dbAccessor.addPKConstraint(SERVICE_COMPONENT_HISTORY_TABLE, "PK_sc_history", ID);
+
+    dbAccessor.addFKConstraint(SERVICE_COMPONENT_HISTORY_TABLE, "FK_sc_history_component_id",
+        "component_id", SERVICE_COMPONENT_DS_TABLE, "id", false);
+
+    dbAccessor.addFKConstraint(SERVICE_COMPONENT_HISTORY_TABLE, "FK_sc_history_upgrade_id",
+        "upgrade_id", UPGRADE_TABLE, "upgrade_id", false);
+
+    dbAccessor.addFKConstraint(SERVICE_COMPONENT_HISTORY_TABLE, "FK_sc_history_from_stack_id",
+        "from_stack_id", STACK_TABLE, "stack_id", false);
+
+    dbAccessor.addFKConstraint(SERVICE_COMPONENT_HISTORY_TABLE, "FK_sc_history_to_stack_id",
+        "to_stack_id", STACK_TABLE, "stack_id", false);
+
+    addSequence("servicecomponent_history_id_seq", 0L, false);
+  }
 }
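
A rough, platform-neutral rendering (not part of this commit) of the servicecomponent_history DDL that the javadoc above enumerates. The concrete statements are generated per database by the DbmsHelper, so exact types and syntax will differ; this sketch only gathers the columns, primary key, and foreign keys in one place.

public class ServiceComponentHistoryDdlSketch {
  // Approximate DDL assembled from the javadoc above; real DDL is emitted per platform.
  static final String CREATE_TABLE =
      "CREATE TABLE servicecomponent_history ("
      + " id BIGINT NOT NULL,"
      + " component_id BIGINT NOT NULL,"
      + " upgrade_id BIGINT NOT NULL,"
      + " from_stack_id BIGINT NOT NULL,"
      + " to_stack_id BIGINT NOT NULL,"
      + " CONSTRAINT PK_sc_history PRIMARY KEY (id),"
      + " CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id),"
      + " CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id),"
      + " CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id),"
      + " CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id))";

  public static void main(String[] args) {
    System.out.println(CREATE_TABLE);
  }
}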

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f78af7a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 3ec982a..b892bc8 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -1020,6 +1020,7 @@ INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('topology_re
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('topology_host_group_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('setting_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('hostcomponentstate_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('servicecomponentdesiredstate_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('servicecomponent_history_id_seq', 0);
 
 insert into adminresourcetype (resource_type_id, resource_type_name)
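
One bookkeeping detail ties this seed value to the upgrade path: in updateServiceComponentDesiredStateTableDDL() above, the id counter starts at 1 and getAndIncrement() assigns 1..N to the N existing rows, so addSequence() is called with counter.get() == N + 1, the next free id. Fresh installs, as in this DDL change, simply seed servicecomponentdesiredstate_id_seq with 0. A tiny standalone sketch (for illustration only) of that arithmetic:

import java.util.concurrent.atomic.AtomicLong;

public class IdSeedSketch {
  public static void main(String[] args) {
    // Mirrors the scdsIdCounter usage: start at 1, hand out ids to the existing rows,
    // then seed the sequence with whatever the counter points at next.
    AtomicLong counter = new AtomicLong(1);
    int existingRows = 3; // pretend three servicecomponentdesiredstate rows are back-filled

    for (int i = 0; i < existingRows; i++) {
      System.out.println("back-filled id = " + counter.getAndIncrement()); // prints 1, 2, 3
    }

    System.out.println("sequence seeded with " + counter.get()); // prints 4, the next free id
  }
}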

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f78af7a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
index a145253..95ae8d8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
@@ -19,12 +19,33 @@
 package org.apache.ambari.server.upgrade;
 
 
-import com.google.inject.Binder;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.Module;
-import com.google.inject.Provider;
-import junit.framework.Assert;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.createMockBuilder;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.eq;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.newCapture;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.reset;
+import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.persistence.EntityManager;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
@@ -32,40 +53,22 @@ import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.dao.StackDAO;
+import org.apache.ambari.server.state.stack.OsFamily;
 import org.easymock.Capture;
 import org.easymock.CaptureType;
-import org.apache.ambari.server.state.stack.OsFamily;
 import org.easymock.EasyMock;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import javax.persistence.EntityManager;
-import java.lang.reflect.Field;
-import java.lang.reflect.Method;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import com.google.inject.Binder;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Module;
+import com.google.inject.Provider;
 
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.capture;
-import static org.easymock.EasyMock.createMockBuilder;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.createStrictMock;
-import static org.easymock.EasyMock.eq;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.newCapture;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.reset;
-import static org.easymock.EasyMock.verify;
-import static org.junit.Assert.assertEquals;
+import junit.framework.Assert;
 
 public class UpgradeCatalog240Test {
   private static Injector injector;
@@ -114,7 +117,44 @@ public class UpgradeCatalog240Test {
     expect(connection.createStatement()).andReturn(statement);
     expect(statement.executeQuery(anyObject(String.class))).andReturn(resultSet);
 
-    replay(dbAccessor);
+    Capture<DBAccessor.DBColumnInfo> repoVersionRepoTypeColumnCapture = newCapture();
+    Capture<DBAccessor.DBColumnInfo> repoVersionUrlColumnCapture = newCapture();
+    Capture<DBAccessor.DBColumnInfo> repoVersionXmlColumnCapture = newCapture();
+    Capture<DBAccessor.DBColumnInfo> repoVersionXsdColumnCapture = newCapture();
+    Capture<DBAccessor.DBColumnInfo> repoVersionParentIdColumnCapture = newCapture();
+
+    dbAccessor.addColumn(eq("repo_version"), capture(repoVersionRepoTypeColumnCapture));
+    dbAccessor.addColumn(eq("repo_version"), capture(repoVersionUrlColumnCapture));
+    dbAccessor.addColumn(eq("repo_version"), capture(repoVersionXmlColumnCapture));
+    dbAccessor.addColumn(eq("repo_version"), capture(repoVersionXsdColumnCapture));
+    dbAccessor.addColumn(eq("repo_version"), capture(repoVersionParentIdColumnCapture));
+
+    // skip all of the drama of the servicecomponentdesiredstate table for now
+    expect(dbAccessor.tableHasPrimaryKey("servicecomponentdesiredstate", "id")).andReturn(true);
+
+    Capture<List<DBAccessor.DBColumnInfo>> capturedHistoryColumns = EasyMock.newCapture();
+    dbAccessor.createTable(eq("servicecomponent_history"), capture(capturedHistoryColumns),
+        eq((String[]) null));
+
+    dbAccessor.addPKConstraint("servicecomponent_history", "PK_sc_history", "id");
+    dbAccessor.addFKConstraint("servicecomponent_history", "FK_sc_history_component_id",
+        "component_id", "servicecomponentdesiredstate", "id", false);
+
+    dbAccessor.addFKConstraint("servicecomponent_history", "FK_sc_history_upgrade_id", "upgrade_id",
+        "upgrade", "upgrade_id", false);
+
+    dbAccessor.addFKConstraint("servicecomponent_history", "FK_sc_history_from_stack_id",
+        "from_stack_id", "stack", "stack_id", false);
+
+    dbAccessor.addFKConstraint("servicecomponent_history", "FK_sc_history_to_stack_id",
+        "to_stack_id", "stack", "stack_id", false);
+
+    expect(dbAccessor.getConnection()).andReturn(connection);
+    expect(connection.createStatement()).andReturn(statement);
+    expect(statement.executeQuery(anyObject(String.class))).andReturn(resultSet);
+
+    replay(dbAccessor, configuration, connection, statement, resultSet);
+
     Module module = new Module() {
       @Override
       public void configure(Binder binder) {
@@ -148,6 +188,21 @@ public class UpgradeCatalog240Test {
     for(DBAccessor.DBColumnInfo settingColumnInfo : capturedSettingColumns.getValue()) {
       actualCaptures.put(settingColumnInfo.getName(), settingColumnInfo.getType());
     }
+
+    assertEquals(expectedCaptures, actualCaptures);
+
+    expectedCaptures = new HashMap<>();
+    expectedCaptures.put("id", Long.class);
+    expectedCaptures.put("component_id", Long.class);
+    expectedCaptures.put("upgrade_id", Long.class);
+    expectedCaptures.put("from_stack_id", Long.class);
+    expectedCaptures.put("to_stack_id", Long.class);
+
+    actualCaptures = new HashMap<>();
+    for (DBAccessor.DBColumnInfo historyColumnInfo : capturedHistoryColumns.getValue()) {
+      actualCaptures.put(historyColumnInfo.getName(), historyColumnInfo.getType());
+    }
+
     assertEquals(expectedCaptures, actualCaptures);
 
     verify(dbAccessor);
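
The assertions above lean on EasyMock captures to inspect the column definitions handed to createTable() without asserting on call order. A minimal standalone illustration of that pattern (not from this patch, assuming the same EasyMock static imports the test already uses; the "code under test" here is just the call itself):

import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.newCapture;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;

import java.util.Arrays;
import java.util.List;

import org.apache.ambari.server.orm.DBAccessor;
import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
import org.easymock.Capture;

public class CaptureSketch {
  public static void main(String[] args) throws Exception {
    DBAccessor dbAccessor = createNiceMock(DBAccessor.class);
    Capture<List<DBColumnInfo>> captured = newCapture();

    // Expectation: something will create the history table with some column list.
    dbAccessor.createTable(eq("servicecomponent_history"), capture(captured), eq((String[]) null));
    replay(dbAccessor);

    // "Code under test" -- here just the call itself, for illustration.
    dbAccessor.createTable("servicecomponent_history",
        Arrays.asList(new DBColumnInfo("id", Long.class, null, null, false)), (String[]) null);

    verify(dbAccessor);
    System.out.println("captured " + captured.getValue().size() + " column definition(s)");
  }
}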


[06/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c7be26ad
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c7be26ad
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c7be26ad

Branch: refs/heads/trunk
Commit: c7be26ad659d35bc6bc57d3d022da7847a967184
Parents: abc961a 7bac2a1
Author: Nate Cole <nc...@hortonworks.com>
Authored: Tue Jan 26 16:44:49 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Tue Jan 26 16:44:49 2016 -0500

----------------------------------------------------------------------
 KEYS                                            |  34 ++
 .../libraries/functions/ranger_functions.py     |  27 +-
 .../libraries/functions/ranger_functions_v2.py  |  30 +-
 ambari-server/conf/unix/ca.config               |   2 +-
 ambari-server/conf/windows/ca.config            |   2 +-
 .../server/actionmanager/ActionScheduler.java   |   2 +-
 .../resources/ResourceInstanceFactoryImpl.java  |   4 +
 .../api/services/AdminSettingService.java       | 148 ++++++
 .../server/checks/PreviousUpgradeCompleted.java |  40 +-
 .../server/configuration/Configuration.java     |  26 ++
 .../ambari/server/controller/AmbariServer.java  |   2 +
 .../server/controller/ControllerModule.java     |   2 +
 .../server/controller/KerberosHelperImpl.java   |  14 +-
 .../controller/ResourceProviderFactory.java     |   4 +
 .../AbstractControllerResourceProvider.java     |   2 +
 .../internal/AdminSettingResourceProvider.java  | 245 ++++++++++
 .../internal/DefaultProviderModule.java         |   4 +-
 .../internal/UpgradeResourceProvider.java       |  44 +-
 .../controller/jmx/JMXPropertyProvider.java     |  19 +-
 .../ThreadPoolEnabledPropertyProvider.java      |  40 +-
 .../metrics/timeline/AMSPropertyProvider.java   |  20 +-
 .../ambari/server/controller/spi/Resource.java  |  16 +-
 ...eredThreadPoolExecutorCompletionService.java |  98 ++++
 .../utilities/ScalingThreadPoolExecutor.java    |  48 ++
 .../ambari/server/orm/dao/AdminSettingDAO.java  | 100 ++++
 .../server/orm/entities/AdminSettingEntity.java | 149 ++++++
 .../authorization/RoleAuthorization.java        |   1 +
 .../security/ldap/AmbariLdapDataPopulator.java  |   1 +
 .../PrepareDisableKerberosServerAction.java     |  24 +
 .../server/state/cluster/ClusterImpl.java       |  17 +-
 .../AbstractKerberosDescriptorContainer.java    |  12 +
 .../state/kerberos/KerberosDescriptor.java      |  41 ++
 .../main/resources/Ambari-DDL-Derby-CREATE.sql  |   2 +
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   2 +
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |   2 +
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |   2 +
 .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql     |   2 +
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |   2 +
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   |   2 +
 .../src/main/resources/META-INF/persistence.xml |   3 +-
 .../HAWQ/2.0.0/package/scripts/common.py        |  11 -
 .../2.0.0/package/scripts/hawq_constants.py     |   6 +-
 .../HAWQ/2.0.0/package/scripts/hawqsegment.py   |   1 -
 .../HAWQ/2.0.0/package/scripts/master_helper.py |  10 -
 .../HAWQ/2.0.0/package/scripts/params.py        |  28 ++
 .../HAWQ/2.0.0/package/scripts/service_check.py | 144 ++++--
 .../HAWQ/2.0.0/package/scripts/utils.py         |   6 +-
 .../2.0.0/package/templates/hawq-profile.sh.j2  |  27 --
 .../common-services/HDFS/2.1.0.2.0/alerts.json  | 464 +++++++++++++++++++
 .../package/alerts/alert_metrics_deviation.py   | 357 ++++++++++++++
 .../HIVE/0.12.0.2.0/configuration/hive-env.xml  |   8 +-
 .../HIVE/0.12.0.2.0/configuration/hive-site.xml |  16 +
 .../OOZIE/4.0.0.2.0/configuration/oozie-env.xml |   5 -
 .../4.0.0.2.0/configuration/oozie-log4j.xml     |   5 +-
 .../4.2.0.2.3/configuration/oozie-site.xml      |  16 +
 .../common-services/PXF/3.0.0/kerberos.json     |   6 +-
 .../PXF/3.0.0/package/scripts/pxf.py            |  12 +-
 .../SPARK/1.2.0.2.2/package/scripts/params.py   |   1 +
 .../1.2.0.2.2/package/scripts/setup_spark.py    |  12 +-
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |  24 +-
 .../stacks/HDP/2.1/services/stack_advisor.py    |  59 ++-
 .../services/HIVE/configuration/hive-site.xml   |  34 +-
 .../stacks/HDP/2.2/upgrades/upgrade-2.2.xml     |   2 +-
 .../stacks/HDP/2.2/upgrades/upgrade-2.3.xml     |   2 +-
 .../stacks/HDP/2.2/upgrades/upgrade-2.4.xml     |   2 +-
 .../stacks/HDP/2.3/upgrades/config-upgrade.xml  |   2 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml |   5 +
 .../stacks/HDP/2.3/upgrades/upgrade-2.3.xml     |   2 +-
 .../stacks/HDP/2.3/upgrades/upgrade-2.4.xml     |   3 +-
 .../stacks/HDP/2.4/role_command_order.json      |   8 +
 .../stacks/HDP/2.4/services/SPARK/metainfo.xml  |   1 +
 .../stacks/HDP/2.4/upgrades/config-upgrade.xml  |  32 ++
 .../stacks/HDP/2.4/upgrades/upgrade-2.4.xml     |   2 +-
 .../services/HIVE/configuration/hive-env.xml    |   6 +
 .../services/OOZIE/configuration/oozie-env.xml  |  11 -
 .../stacks/HDPWIN/2.1/services/stack_advisor.py |  14 +
 .../services/HIVE/configuration/hive-site.xml   |  31 +-
 .../stacks/HDPWIN/2.2/services/stack_advisor.py |  48 +-
 .../services/OOZIE/configuration/oozie-env.xml  |  11 -
 .../services/OOZIE/configuration/oozie-site.xml |  33 +-
 .../stacks/HDPWIN/2.3/services/stack_advisor.py |  19 +-
 .../KdcServerConnectionVerificationTest.java    |   8 +-
 .../api/services/AdminSettingServiceTest.java   | 101 ++++
 .../checks/PreviousUpgradeCompletedTest.java    | 220 +++++++++
 .../server/configuration/ConfigurationTest.java |  15 +
 .../AmbariManagementControllerImplTest.java     |  18 +-
 .../AmbariManagementControllerTest.java         |   5 +-
 .../AdminSettingResourceProviderTest.java       | 345 ++++++++++++++
 .../StackDefinedPropertyProviderTest.java       |   4 +
 .../metrics/JMXPropertyProviderTest.java        |  12 +
 .../RestMetricsPropertyProviderTest.java        |   4 +
 ...ThreadPoolExecutorCompletionServiceTest.java | 188 ++++++++
 .../server/orm/dao/AdminSettingDAOTest.java     |  89 ++++
 .../orm/entities/AdminSettingEntityTest.java    |  91 ++++
 .../state/kerberos/KerberosDescriptorTest.java  |   9 +-
 .../stacks/2.1/common/test_stack_advisor.py     | 102 +++-
 .../stacks/2.2/SPARK/test_job_history_server.py |  14 +
 .../stacks/2.2/SPARK/test_spark_client.py       |  14 +
 .../stacks/2.2/common/test_stack_advisor.py     |  10 +-
 .../2.3/SPARK/test_spark_thrift_server.py       |   7 +
 .../src/test/resources/log4j.properties         |   4 +-
 ambari-web/app/assets/test/tests.js             |   1 +
 ambari-web/app/controllers.js                   |   7 +
 .../global/user_settings_controller.js          |  23 +-
 .../hawq/addStandby/step1_controller.js         |  24 +
 .../hawq/addStandby/step2_controller.js         |  35 ++
 .../hawq/addStandby/step3_controller.js         | 146 ++++++
 .../hawq/addStandby/step4_controller.js         |  90 ++++
 .../hawq/addStandby/wizard_controller.js        | 129 ++++++
 .../main/admin/highAvailability_controller.js   |   9 +
 .../main/admin/service_auto_start.js            |  30 ++
 .../alerts/definition_configs_controller.js     |  12 +-
 .../main/host/bulk_operations_controller.js     |  24 +
 .../controllers/main/host/combo_search_box.js   | 130 ++++++
 .../controllers/main/service/info/configs.js    |  17 -
 ambari-web/app/controllers/main/service/item.js |  69 +--
 .../service/widgets/create/step2_controller.js  |   4 +-
 ambari-web/app/controllers/wizard.js            |   2 +
 .../app/controllers/wizard/step0_controller.js  |   5 +
 .../app/controllers/wizard/step3_controller.js  |   6 +-
 .../app/controllers/wizard/step4_controller.js  |   5 +
 .../app/controllers/wizard/step6_controller.js  |  14 +-
 .../app/controllers/wizard/step7_controller.js  |  18 +-
 .../app/data/HDP2.3/hawq_ha_properties.js       |  43 ++
 ambari-web/app/data/HDP2/ui_properties.js       |  22 +-
 ambari-web/app/data/controller_route.js         |   4 +
 ambari-web/app/data/db_properties_info.js       |   6 +-
 .../app/mappers/alert_definitions_mapper.js     |  24 +-
 ambari-web/app/messages.js                      |  42 +-
 .../mixins/common/widgets/time_range_mixin.js   |   2 +-
 .../app/mixins/common/widgets/widget_mixin.js   |  11 +-
 .../app/mixins/common/widgets/widget_section.js |   2 +-
 .../mixins/wizard/assign_master_components.js   |   8 +-
 .../wizard/wizardProgressPageController.js      |  54 ++-
 ambari-web/app/models/alerts/alert_config.js    |  20 +-
 .../app/models/alerts/alert_definition.js       |  32 +-
 ambari-web/app/models/cluster_states.js         |   2 +
 .../configs/objects/service_config_property.js  |  14 +-
 ambari-web/app/models/host_component.js         |   8 +
 ambari-web/app/models/service.js                |   3 +-
 ambari-web/app/router.js                        |   1 +
 .../app/routes/add_hawq_standby_routes.js       | 203 ++++++++
 ambari-web/app/routes/add_service_routes.js     |   5 +-
 ambari-web/app/routes/installer.js              |  80 ++--
 ambari-web/app/routes/main.js                   |  11 +
 ambari-web/app/styles/application.less          |   6 +
 .../highAvailability/hawq/addStandby/step1.hbs  |  28 ++
 .../highAvailability/hawq/addStandby/step3.hbs  |  58 +++
 .../highAvailability/hawq/addStandby/step4.hbs  |  18 +
 .../highAvailability/hawq/addStandby/wizard.hbs |  45 ++
 .../templates/main/admin/service_auto_start.hbs |  32 ++
 ambari-web/app/templates/main/host.hbs          |   2 +-
 ambari-web/app/utils/ajax/ajax.js               |  20 -
 .../app/utils/configs/config_initializer.js     |   2 -
 ambari-web/app/utils/configs/database.js        |   6 +-
 .../utils/configs/hawq_ha_config_initializer.js |  52 +++
 ambari-web/app/utils/db.js                      |   1 +
 ambari-web/app/views.js                         |   6 +
 .../app/views/common/chart/linear_time.js       |  49 +-
 .../views/common/configs/config_history_flow.js |   8 +
 ambari-web/app/views/common/controls_view.js    |  52 +--
 .../app/views/common/quick_view_link_view.js    |   3 +
 ambari-web/app/views/main/admin.js              |   7 +
 .../hawq/addStandby/step1_view.js               |  26 ++
 .../hawq/addStandby/step2_view.js               |  28 ++
 .../hawq/addStandby/step3_view.js               |  29 ++
 .../hawq/addStandby/step4_view.js               |  36 ++
 .../hawq/addStandby/wizard_view.js              |  74 +++
 .../app/views/main/admin/service_auto_start.js  |  28 ++
 .../app/views/main/host/combo_search_box.js     | 107 +----
 .../views/main/host/hosts_table_menu_view.js    |   7 +
 ambari-web/app/views/main/menu.js               |   9 +-
 .../app/views/main/service/info/summary.js      |   9 +-
 ambari-web/app/views/main/service/item.js       |   3 +
 ambari-web/app/views/wizard/step10_view.js      |   3 +-
 ambari-web/app/views/wizard/step3_view.js       |   1 -
 ambari-web/app/views/wizard/step4_view.js       |   3 +-
 ambari-web/app/views/wizard/step7_view.js       |   1 -
 ambari-web/app/views/wizard/step8_view.js       |   1 -
 ambari-web/app/views/wizard/step9_view.js       |   1 -
 .../hawq/addStandby/step3_controller_test.js    | 189 ++++++++
 .../definitions_configs_controller_test.js      |   7 +-
 .../host/bulk_operations_controller_test.js     |  10 +
 .../test/controllers/main/service/item_test.js  |  63 ++-
 ambari-web/test/controllers/main_test.js        |   2 +-
 .../test/controllers/wizard/step0_test.js       |  13 +-
 .../test/controllers/wizard/step3_test.js       |  15 +
 .../test/controllers/wizard/step4_test.js       |  21 +
 .../test/controllers/wizard/step5_test.js       |  20 +
 .../test/controllers/wizard/step7_test.js       |  37 +-
 .../test/mixins/common/widget_mixin_test.js     |   2 +-
 .../utils/configs/config_initializer_test.js    |  30 +-
 .../test/views/common/chart/linear_time_test.js |  70 ++-
 .../test/views/common/controls_view_test.js     |  56 ---
 .../test/views/common/quick_link_view_test.js   |  94 +++-
 .../views/main/service/info/summary_test.js     |  56 +++
 ambari-web/test/views/wizard/step4_view_test.js |   2 +-
 ambari-web/test/views/wizard/step7_view_test.js |   4 +-
 .../view/filebrowser/PropertyValidator.java     |  10 +-
 .../view/filebrowser/PropertyValidatorTest.java |  75 +++
 .../ambari/view/hive/PropertyValidator.java     |  28 +-
 .../ui/hive-web/app/controllers/tez-ui.js       |   2 +-
 .../ui/hive-web/tests/helpers/api-mock.js       |   5 +-
 .../ambari/view/pig/PropertyValidator.java      |  26 +-
 contrib/views/utils/pom.xml                     |   5 +
 .../view/utils/ambari/ValidatorUtils.java       |  46 ++
 206 files changed, 6093 insertions(+), 934 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c7be26ad/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/c7be26ad/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/c7be26ad/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/c7be26ad/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/c7be26ad/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/c7be26ad/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/c7be26ad/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/c7be26ad/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------


[42/50] [abbrv] ambari git commit: AMBARI-15131 - Create Component/Service Upgrade History On Finalize (jonathanhurley)

Posted by nc...@apache.org.
AMBARI-15131 - Create Component/Service Upgrade History On Finalize (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/55342fc6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/55342fc6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/55342fc6

Branch: refs/heads/trunk
Commit: 55342fc621aed290173cc09f137af97afa18dc0d
Parents: ec91f74
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Mon Feb 22 11:01:20 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Mon Feb 22 11:50:00 2016 -0500

----------------------------------------------------------------------
 .../internal/UpgradeResourceProvider.java       |  16 ++-
 .../upgrades/FinalizeUpgradeAction.java         |  91 +++++++++++++--
 .../upgrades/UpgradeActionTest.java             | 110 ++++++++++++++++++-
 3 files changed, 198 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/55342fc6/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
index 860ba88..0190014 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
@@ -173,6 +173,8 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
   private static final String COMMAND_PARAM_CLUSTER_NAME = "clusterName";
   private static final String COMMAND_PARAM_DIRECTION = "upgrade_direction";
   private static final String COMMAND_PARAM_UPGRADE_PACK = "upgrade_pack";
+  private static final String COMMAND_PARAM_REQUEST_ID = "request_id";
+
   // TODO AMBARI-12698, change this variable name since it is no longer always a restart. Possible values are rolling_upgrade or nonrolling_upgrade
   // This will involve changing Script.py
   private static final String COMMAND_PARAM_RESTART_TYPE = "restart_type";
@@ -1189,7 +1191,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
         new ArrayList<String>(wrapper.getHosts()));
 
     LOG.debug("Analyzing upgrade item {} with tasks: {}.", entity.getText(), entity.getTasks());
-    Map<String, String> params = getNewParameterMap();
+    Map<String, String> params = getNewParameterMap(request);
     params.put(COMMAND_PARAM_TASKS, entity.getTasks());
     params.put(COMMAND_PARAM_VERSION, context.getVersion());
     params.put(COMMAND_PARAM_DIRECTION, context.getDirection().name().toLowerCase());
@@ -1297,7 +1299,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
         break;
     }
 
-    Map<String, String> commandParams = getNewParameterMap();
+    Map<String, String> commandParams = getNewParameterMap(request);
 
     // TODO AMBARI-12698, change COMMAND_PARAM_RESTART_TYPE to something that isn't "RESTART" specific.
     if (context.getType() == UpgradeType.ROLLING) {
@@ -1365,7 +1367,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
 
     Cluster cluster = context.getCluster();
 
-    Map<String, String> commandParams = getNewParameterMap();
+    Map<String, String> commandParams = getNewParameterMap(request);
     commandParams.put(COMMAND_PARAM_VERSION, context.getVersion());
     commandParams.put(COMMAND_PARAM_DIRECTION, context.getDirection().name().toLowerCase());
     commandParams.put(COMMAND_PARAM_ORIGINAL_STACK, context.getOriginalStackId().getStackId());
@@ -1405,7 +1407,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
     stage.setStageId(stageId);
     entity.setStageId(Long.valueOf(stageId));
 
-    Map<String, String> requestParams = getNewParameterMap();
+    Map<String, String> requestParams = getNewParameterMap(request);
     s_commandExecutionHelper.get().addExecutionCommandsToStage(actionContext, stage, requestParams);
 
     request.addStages(Collections.singletonList(stage));
@@ -1432,7 +1434,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
 
     Cluster cluster = context.getCluster();
 
-    Map<String, String> commandParams = getNewParameterMap();
+    Map<String, String> commandParams = getNewParameterMap(request);
     commandParams.put(COMMAND_PARAM_CLUSTER_NAME, cluster.getClusterName());
     commandParams.put(COMMAND_PARAM_VERSION, context.getVersion());
     commandParams.put(COMMAND_PARAM_DIRECTION, context.getDirection().name().toLowerCase());
@@ -1543,13 +1545,15 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
    * command was created. For upgrades, this is problematic since the commands
    * are all created ahead of time, but the upgrade may change configs as part
    * of the upgrade pack.</li>
+   * <li>{@link #COMMAND_PARAM_REQUEST_ID}</li> the ID of the request.
    * <ul>
    *
    * @return
    */
-  private Map<String, String> getNewParameterMap() {
+  private Map<String, String> getNewParameterMap(RequestStageContainer requestStageContainer) {
     Map<String, String> parameters = new HashMap<String, String>();
     parameters.put(KeyNames.REFRESH_CONFIG_TAGS_BEFORE_EXECUTION, "*");
+    parameters.put(COMMAND_PARAM_REQUEST_ID, String.valueOf(requestStageContainer.getId()));
     return parameters;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/55342fc6/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
index 03d407a..0c8df78 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
@@ -35,11 +35,18 @@ import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.orm.dao.ClusterVersionDAO;
 import org.apache.ambari.server.orm.dao.HostComponentStateDAO;
 import org.apache.ambari.server.orm.dao.HostVersionDAO;
+import org.apache.ambari.server.orm.dao.ServiceComponentDesiredStateDAO;
+import org.apache.ambari.server.orm.dao.StackDAO;
+import org.apache.ambari.server.orm.dao.UpgradeDAO;
 import org.apache.ambari.server.orm.entities.ClusterVersionEntity;
 import org.apache.ambari.server.orm.entities.HostComponentStateEntity;
 import org.apache.ambari.server.orm.entities.HostEntity;
 import org.apache.ambari.server.orm.entities.HostVersionEntity;
 import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
+import org.apache.ambari.server.orm.entities.ServiceComponentHistoryEntity;
+import org.apache.ambari.server.orm.entities.StackEntity;
+import org.apache.ambari.server.orm.entities.UpgradeEntity;
 import org.apache.ambari.server.serveraction.AbstractServerAction;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -65,6 +72,7 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
   public static final String CLUSTER_NAME_KEY = "cluster_name";
   public static final String UPGRADE_DIRECTION_KEY = "upgrade_direction";
   public static final String VERSION_KEY = "version";
+  public static final String REQUEST_ID = "request_id";
   public static final String PREVIOUS_UPGRADE_NOT_COMPLETED_MSG = "It is possible that a previous upgrade was not finalized. " +
       "For this reason, Ambari will not remove any configs. Please ensure that all database records are correct.";
 
@@ -97,6 +105,24 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
   @Inject
   private HostComponentStateDAO hostComponentStateDAO;
 
+  /**
+   * Gets {@link StackEntity} instances from {@link StackId}.
+   */
+  @Inject
+  private StackDAO stackDAO;
+
+  /**
+   * Gets desired state entities for service components.
+   */
+  @Inject
+  private ServiceComponentDesiredStateDAO serviceComponentDesiredStateDAO;
+
+  /**
+   * Gets {@link UpgradeEntity} instances.
+   */
+  @Inject
+  private UpgradeDAO upgradeDAO;
+
   @Inject
   private AmbariMetaInfo ambariMetaInfo;
 
@@ -116,10 +142,9 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
     String clusterName = getExecutionCommand().getClusterName();
 
     if (isDowngrade) {
-      return finalizeDowngrade(clusterName, originalStackId, targetStackId,
-          version);
+      return finalizeDowngrade(clusterName, originalStackId, targetStackId, version);
     } else {
-      return finalizeUpgrade(clusterName, version);
+      return finalizeUpgrade(clusterName, version, commandParams);
     }
   }
 
@@ -129,7 +154,8 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
    * @param version     the target version of the upgrade
    * @return the command report
    */
-  private CommandReport finalizeUpgrade(String clusterName, String version)
+  private CommandReport finalizeUpgrade(String clusterName, String version,
+      Map<String, String> commandParams)
     throws AmbariException, InterruptedException {
 
     StringBuilder outSB = new StringBuilder();
@@ -140,6 +166,7 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
 
       Cluster cluster = clusters.getCluster(clusterName);
       StackId clusterDesiredStackId = cluster.getDesiredStackVersion();
+      StackId clusterCurrentStackId = cluster.getCurrentStackVersion();
 
       ClusterVersionEntity upgradingClusterVersion = clusterVersionDAO.findByClusterAndStackAndVersion(
           clusterName, clusterDesiredStackId, version);
@@ -258,7 +285,9 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
             upgradingClusterVersion.getState(), RepositoryVersionState.CURRENT.toString()));
       }
 
-      outSB.append(String.format("Will finalize the upgraded state of host components in %d host(s).\n", hostVersionsAllowed.size()));
+      outSB.append(
+          String.format("Finalizing the upgraded state of host components in %d host(s).\n",
+              hostVersionsAllowed.size()));
 
       // Reset the upgrade state
       for (HostVersionEntity hostVersion : hostVersionsAllowed) {
@@ -269,18 +298,34 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
         }
       }
 
-      outSB.append(String.format("Will finalize the version for %d host(s).\n", hostVersionsAllowed.size()));
-
       // Impacts all hosts that have a version
+      outSB.append(
+          String.format("Finalizing the version for %d host(s).\n", hostVersionsAllowed.size()));
       cluster.mapHostVersions(hostsToUpdate, upgradingClusterVersion, RepositoryVersionState.CURRENT);
 
-      outSB.append(String.format("Will finalize the version for cluster %s.\n", clusterName));
-
       // transitioning the cluster into CURRENT will update the current/desired
       // stack values
+      outSB.append(String.format("Finalizing the version for cluster %s.\n", clusterName));
       cluster.transitionClusterVersion(clusterDesiredStackId, version,
           RepositoryVersionState.CURRENT);
 
+      if (commandParams.containsKey(REQUEST_ID)) {
+        String requestId = commandParams.get(REQUEST_ID);
+        UpgradeEntity upgradeEntity = upgradeDAO.findUpgradeByRequestId(Long.valueOf(requestId));
+
+        if (null != upgradeEntity) {
+          outSB.append("Creating upgrade history.\n");
+          writeComponentHistory(cluster, upgradeEntity, clusterCurrentStackId,
+              clusterDesiredStackId);
+        } else {
+          String warning = String.format(
+              "Unable to create upgrade history because no upgrade could be found for request with ID %s\n",
+              requestId);
+
+          outSB.append(warning);
+        }
+      }
+
       outSB.append("Upgrade was successful!\n");
       return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", outSB.toString(), errSB.toString());
     } catch (Exception e) {
@@ -455,6 +500,33 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
     return errors;
   }
 
+  private void writeComponentHistory(Cluster cluster, UpgradeEntity upgradeEntity,
+      StackId fromStackId, StackId toStackId) {
+
+    StackEntity fromStack = stackDAO.find(fromStackId.getStackName(), fromStackId.getStackVersion());
+    StackEntity toStack = stackDAO.find(toStackId.getStackName(), toStackId.getStackVersion());
+
+    // for every service component, if it was included in the upgrade then
+    // create a historical entry
+    for (Service service : cluster.getServices().values()) {
+      for (ServiceComponent serviceComponent : service.getServiceComponents().values()) {
+        if (serviceComponent.isVersionAdvertised()) {
+          ServiceComponentHistoryEntity historyEntity = new ServiceComponentHistoryEntity();
+          historyEntity.setUpgrade(upgradeEntity);
+          historyEntity.setFromStack(fromStack);
+          historyEntity.setToStack(toStack);
+
+          ServiceComponentDesiredStateEntity desiredStateEntity = serviceComponentDesiredStateDAO.findByName(
+              cluster.getClusterId(), serviceComponent.getServiceName(),
+              serviceComponent.getName());
+
+          historyEntity.setServiceComponentDesiredState(desiredStateEntity);
+          serviceComponentDesiredStateDAO.create(historyEntity);
+        }
+      }
+    }
+  }
+
   protected static class InfoTuple {
     protected final String serviceName;
     protected final String componentName;
@@ -467,7 +539,6 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
       hostName = host;
       currentVersion = version;
     }
-
   }
 
 }
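
Taken together, the two files above form a small round trip: UpgradeResourceProvider stamps the request ID into every stage's command-parameter map, and FinalizeUpgradeAction reads it back, resolves the UpgradeEntity through UpgradeDAO.findUpgradeByRequestId, and only then writes component history. A minimal, self-contained sketch of that flow follows; the class and field names in it (RequestIdRoundTrip, upgradesByRequestId) are hypothetical stand-ins for illustration, not Ambari's actual DAOs or entities.

import java.util.HashMap;
import java.util.Map;

/** Minimal illustration of the request-id round trip; names are hypothetical, not Ambari's. */
public class RequestIdRoundTrip {

    static final String REQUEST_ID = "request_id";

    /** Producer side: every stage gets the request id in its parameter map. */
    static Map<String, String> newParameterMap(long requestId) {
        Map<String, String> parameters = new HashMap<>();
        parameters.put(REQUEST_ID, String.valueOf(requestId));
        return parameters;
    }

    /** Consumer side: the finalize step resolves the id back to an upgrade record, if any. */
    static String describeUpgrade(Map<String, String> commandParams, Map<Long, String> upgradesByRequestId) {
        if (!commandParams.containsKey(REQUEST_ID)) {
            return "no request id supplied; skipping history";
        }
        Long requestId = Long.valueOf(commandParams.get(REQUEST_ID));
        // stand-in for UpgradeDAO.findUpgradeByRequestId
        String upgrade = upgradesByRequestId.get(requestId);
        return upgrade != null
            ? "writing history for upgrade " + upgrade
            : "no upgrade found for request " + requestId + "; skipping history";
    }

    public static void main(String[] args) {
        Map<Long, String> upgrades = new HashMap<>();
        upgrades.put(1L, "HDP-2.1.1.0 to HDP-2.1.1.1");

        System.out.println(describeUpgrade(newParameterMap(1L), upgrades));
        System.out.println(describeUpgrade(newParameterMap(99L), upgrades));
    }
}

The property worth noting is that a missing or unknown request ID degrades to a warning rather than failing the finalize step, which matches the behavior added to finalizeUpgrade above.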

http://git-wip-us.apache.org/repos/asf/ambari/blob/55342fc6/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
index 989eba2..f43642c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
@@ -50,11 +50,17 @@ import org.apache.ambari.server.orm.dao.ClusterVersionDAO;
 import org.apache.ambari.server.orm.dao.HostDAO;
 import org.apache.ambari.server.orm.dao.HostVersionDAO;
 import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
+import org.apache.ambari.server.orm.dao.RequestDAO;
+import org.apache.ambari.server.orm.dao.ServiceComponentDesiredStateDAO;
 import org.apache.ambari.server.orm.dao.StackDAO;
+import org.apache.ambari.server.orm.dao.UpgradeDAO;
 import org.apache.ambari.server.orm.entities.ClusterVersionEntity;
 import org.apache.ambari.server.orm.entities.HostVersionEntity;
 import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.orm.entities.RequestEntity;
+import org.apache.ambari.server.orm.entities.ServiceComponentHistoryEntity;
 import org.apache.ambari.server.orm.entities.StackEntity;
+import org.apache.ambari.server.orm.entities.UpgradeEntity;
 import org.apache.ambari.server.serveraction.ServerAction;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -73,6 +79,7 @@ import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.State;
 import org.apache.ambari.server.state.stack.UpgradePack;
 import org.apache.ambari.server.state.stack.upgrade.Direction;
+import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -109,8 +116,6 @@ public class UpgradeActionTest {
 
   private AmbariManagementController amc;
 
-  private AmbariMetaInfo ambariMetaInfo;
-
   @Inject
   private OrmTestHelper m_helper;
 
@@ -141,6 +146,15 @@ public class UpgradeActionTest {
   @Inject
   private ServiceComponentHostFactory serviceComponentHostFactory;
 
+  @Inject
+  private RequestDAO requestDAO;
+
+  @Inject
+  private UpgradeDAO upgradeDAO;
+
+  @Inject
+  private ServiceComponentDesiredStateDAO serviceComponentDesiredStateDAO;
+
   @Before
   public void setup() throws Exception {
     m_injector = Guice.createInjector(new InMemoryDefaultTestModule());
@@ -150,7 +164,6 @@ public class UpgradeActionTest {
 
     // Initialize AmbariManagementController
     amc = m_injector.getInstance(AmbariManagementController.class);
-    ambariMetaInfo = m_injector.getInstance(AmbariMetaInfo.class);
 
     Field field = AmbariServer.class.getDeclaredField("clusterController");
     field.setAccessible(true);
@@ -907,6 +920,97 @@ public class UpgradeActionTest {
     assertEquals(targetStack, desiredStackId);
   }
 
+  @Test
+  public void testUpgradeHistory() throws Exception {
+    StackId sourceStack = HDP_21_STACK;
+    StackId targetStack = HDP_21_STACK;
+    String sourceRepo = HDP_2_1_1_0;
+    String targetRepo = HDP_2_1_1_1;
+
+    makeUpgradeCluster(sourceStack, sourceRepo, targetStack, targetRepo);
+
+    // Verify the repo before calling Finalize
+    AmbariMetaInfo metaInfo = m_injector.getInstance(AmbariMetaInfo.class);
+    AmbariCustomCommandExecutionHelper helper = m_injector.getInstance(AmbariCustomCommandExecutionHelper.class);
+    Host host = clusters.getHost("h1");
+    Cluster cluster = clusters.getCluster(clusterName);
+
+    // install HDFS with some components
+    Service service = installService(cluster, "HDFS");
+    addServiceComponent(cluster, service, "NAMENODE");
+    addServiceComponent(cluster, service, "DATANODE");
+    ServiceComponentHost nnSCH = createNewServiceComponentHost(cluster, "HDFS", "NAMENODE", "h1");
+    ServiceComponentHost dnSCH = createNewServiceComponentHost(cluster, "HDFS", "DATANODE", "h1");
+
+    // fake their upgrade
+    nnSCH.setStackVersion(nnSCH.getDesiredStackVersion());
+    nnSCH.setVersion(targetRepo);
+    dnSCH.setStackVersion(nnSCH.getDesiredStackVersion());
+    dnSCH.setVersion(targetRepo);
+
+    // create some entities for the finalize action to work with for patch
+    // history
+    RequestEntity requestEntity = new RequestEntity();
+    requestEntity.setClusterId(cluster.getClusterId());
+    requestEntity.setRequestId(1L);
+    requestEntity.setStartTime(System.currentTimeMillis());
+    requestEntity.setCreateTime(System.currentTimeMillis());
+    requestDAO.create(requestEntity);
+
+    UpgradeEntity upgradeEntity = new UpgradeEntity();
+    upgradeEntity.setId(1L);
+    upgradeEntity.setClusterId(cluster.getClusterId());
+    upgradeEntity.setRequestId(requestEntity.getRequestId());
+    upgradeEntity.setUpgradePackage("");
+    upgradeEntity.setFromVersion(sourceRepo);
+    upgradeEntity.setToVersion(targetRepo);
+    upgradeEntity.setUpgradeType(UpgradeType.NON_ROLLING);
+    upgradeDAO.create(upgradeEntity);
+
+    // verify that no history exists yet
+    List<ServiceComponentHistoryEntity> historyEntites = serviceComponentDesiredStateDAO.findHistory(
+        cluster.getClusterId(), nnSCH.getServiceName(),
+        nnSCH.getServiceComponentName());
+
+    assertEquals(0, historyEntites.size());
+
+    RepositoryInfo repo = metaInfo.getRepository(sourceStack.getStackName(), sourceStack.getStackVersion(), "redhat6", sourceStack.getStackId());
+    assertEquals(HDP_211_CENTOS6_REPO_URL, repo.getBaseUrl());
+    verifyBaseRepoURL(helper, cluster, host, HDP_211_CENTOS6_REPO_URL);
+
+    // Finalize the upgrade, passing in the request ID so that history is
+    // created
+    Map<String, String> commandParams = new HashMap<String, String>();
+    commandParams.put(FinalizeUpgradeAction.REQUEST_ID, String.valueOf(requestEntity.getRequestId()));
+    commandParams.put(FinalizeUpgradeAction.UPGRADE_DIRECTION_KEY, "upgrade");
+    commandParams.put(FinalizeUpgradeAction.VERSION_KEY, targetRepo);
+
+    ExecutionCommand executionCommand = new ExecutionCommand();
+    executionCommand.setCommandParams(commandParams);
+    executionCommand.setClusterName(clusterName);
+
+    HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
+    hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
+
+    FinalizeUpgradeAction action = m_injector.getInstance(FinalizeUpgradeAction.class);
+    action.setExecutionCommand(executionCommand);
+    action.setHostRoleCommand(hostRoleCommand);
+
+    CommandReport report = action.execute(null);
+    assertNotNull(report);
+    assertEquals(HostRoleStatus.COMPLETED.name(), report.getStatus());
+
+    // Verify the metainfo url
+    verifyBaseRepoURL(helper, cluster, host, "http://foo1");
+
+    // ensure that history now exists
+    historyEntites = serviceComponentDesiredStateDAO.findHistory(cluster.getClusterId(),
+        nnSCH.getServiceName(), nnSCH.getServiceComponentName());
+
+    assertEquals(1, historyEntites.size());
+  }
+
+
   private ServiceComponentHost createNewServiceComponentHost(Cluster cluster, String svc,
                                                              String svcComponent, String hostName) throws AmbariException {
     Assert.assertNotNull(cluster.getConfigGroups());


[12/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4c5d2bd0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4c5d2bd0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4c5d2bd0

Branch: refs/heads/trunk
Commit: 4c5d2bd0a927f45f99c4045c4c9036859f0d19c8
Parents: 77af8e5 8ba3d0b
Author: Nate Cole <nc...@hortonworks.com>
Authored: Tue Feb 2 07:40:38 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Tue Feb 2 07:40:38 2016 -0500

----------------------------------------------------------------------
 .../src/main/python/ambari_agent/HostInfo.py    |   4 +-
 ambari-server/conf/unix/ambari.properties       |   3 +
 ambari-server/conf/windows/ambari.properties    |   3 +
 .../resources/ResourceInstanceFactoryImpl.java  |   4 +-
 .../api/services/AdminSettingService.java       | 148 --------
 .../server/api/services/SettingService.java     | 148 ++++++++
 .../StackAdvisorBlueprintProcessor.java         |  66 +++-
 .../server/configuration/Configuration.java     |  20 ++
 .../server/controller/KerberosHelperImpl.java   |  16 +-
 .../internal/AbstractProviderModule.java        | 187 ++++++----
 .../internal/AdminSettingResourceProvider.java  | 250 -------------
 .../internal/DefaultProviderModule.java         |   4 +-
 .../internal/SettingResourceProvider.java       | 251 +++++++++++++
 .../internal/UpgradeResourceProvider.java       |   2 +-
 .../server/controller/jmx/JMXHostProvider.java  |   8 +-
 .../controller/jmx/JMXPropertyProvider.java     |  15 +-
 .../ambari/server/controller/spi/Resource.java  |   4 +-
 .../ambari/server/orm/dao/AdminSettingDAO.java  | 100 ------
 .../ambari/server/orm/dao/SettingDAO.java       | 100 ++++++
 .../server/orm/entities/AdminSettingEntity.java | 149 --------
 .../server/orm/entities/SettingEntity.java      | 149 ++++++++
 .../authorization/RoleAuthorization.java        |   2 +-
 .../serveraction/ServerActionExecutor.java      |  25 +-
 .../main/resources/Ambari-DDL-Derby-CREATE.sql  |   8 +-
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   8 +-
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |   8 +-
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |   8 +-
 .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql     |  10 +-
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |   8 +-
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   |   8 +-
 .../src/main/resources/META-INF/persistence.xml |   2 +-
 .../HAWQ/2.0.0/package/scripts/params.py        |  21 --
 .../0.4.0/package/scripts/setup_ranger_xml.py   |   5 +
 .../RANGER/configuration/ranger-admin-site.xml  |  10 +
 .../stacks/HDP/2.3/services/stack_advisor.py    |  11 +
 .../api/services/AdminSettingServiceTest.java   | 101 ------
 .../server/api/services/SettingServiceTest.java | 101 ++++++
 .../StackAdvisorBlueprintProcessorTest.java     | 100 +++++-
 .../server/configuration/ConfigurationTest.java |  15 +
 .../AdminSettingResourceProviderTest.java       | 356 -------------------
 .../internal/JMXHostProviderTest.java           | 130 +++++--
 .../internal/SettingResourceProviderTest.java   | 355 ++++++++++++++++++
 .../metrics/JMXPropertyProviderTest.java        |   6 +-
 .../server/orm/dao/AdminSettingDAOTest.java     |  93 -----
 .../ambari/server/orm/dao/SettingDAOTest.java   |  93 +++++
 .../orm/entities/AdminSettingEntityTest.java    |  91 -----
 .../server/orm/entities/SettingEntityTest.java  |  90 +++++
 .../stacks/2.3/common/test_stack_advisor.py     |   3 +-
 ambari-web/app/messages.js                      |   3 +-
 .../main/alerts/definition_details.hbs          |   6 +
 .../main/alerts/definition_details_view.js      |   6 +
 .../global/cluster_controller_test.js           |   4 +-
 .../progress_controller_test.js                 |   4 +-
 .../admin/kerberos/step4_controller_test.js     |   7 +-
 ...anage_alert_notifications_controller_test.js |  85 +++--
 .../main/host/add_controller_test.js            |   2 +
 .../test/controllers/wizard/step3_test.js       |  43 +--
 .../test/controllers/wizard/step4_test.js       |  19 +-
 .../test/controllers/wizard/step5_test.js       |   7 +-
 .../test/controllers/wizard/step6_test.js       |   6 +-
 .../test/controllers/wizard/step7_test.js       |  13 +-
 .../test/controllers/wizard/step8_test.js       |  19 +-
 .../test/controllers/wizard/step9_test.js       |  23 +-
 ambari-web/test/controllers/wizard_test.js      |   8 +-
 ambari-web/test/mappers/service_mapper_test.js  |   8 +-
 .../common/configs/toggle_isrequired_test.js    |  12 +-
 .../kdc_credentials_controller_mixin_test.js    |  29 +-
 .../test/mixins/common/serverValidator_test.js  |  14 +-
 ambari-web/test/models/cluster_states_test.js   |  20 +-
 .../objects/service_config_property_test.js     |  38 +-
 ambari-web/test/router_test.js                  |  84 ++---
 ambari-web/test/utils/ajax/ajax_queue_test.js   |   2 +-
 ambari-web/test/utils/config_test.js            |  32 +-
 ambari-web/test/utils/date/timezone_test.js     |  24 +-
 ambari-web/test/utils/helper_test.js            |   6 +-
 .../test/views/common/chart/linear_time_test.js |  16 +-
 .../common/configs/service_config_view_test.js  |  18 +-
 .../modal_popups/cluster_check_popup_test.js    |  12 +-
 .../manage_alert_notifications_view_test.js     |  17 +-
 ambari-web/test/views/wizard/step3_view_test.js |  31 +-
 ambari-web/test/views/wizard/step9_view_test.js |  71 ++--
 81 files changed, 2235 insertions(+), 1753 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4c5d2bd0/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java
index 987c11b,4c12094..da00d55
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java
@@@ -408,14 -408,10 +408,14 @@@ public class ResourceInstanceFactoryImp
          resourceDefinition = new SimpleResourceDefinition(Resource.Type.UserAuthorization, "authorization", "authorizations");
          break;
  
-       case AdminSetting:
-         resourceDefinition = new SimpleResourceDefinition(Resource.Type.AdminSetting, "admin-setting", "admin-settings");
+       case Setting:
+         resourceDefinition = new SimpleResourceDefinition(Resource.Type.Setting, "setting", "settings");
          break;
  
 +      case VersionDefinition:
 +        resourceDefinition = new VersionDefinitionResourceDefinition();
 +        break;
 +
        default:
          throw new IllegalArgumentException("Unsupported resource type: " + type);
      }

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c5d2bd0/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c5d2bd0/ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Resource.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c5d2bd0/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c5d2bd0/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c5d2bd0/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c5d2bd0/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c5d2bd0/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c5d2bd0/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c5d2bd0/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------


[48/50] [abbrv] ambari git commit: AMBARI-14996. Component should support a desired version (merge fix) (ncole)

Posted by nc...@apache.org.
AMBARI-14996. Component should support a desired version (merge fix) (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e5d580f2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e5d580f2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e5d580f2

Branch: refs/heads/trunk
Commit: e5d580f28880a981b16dd5b729243a3eda0cf722
Parents: e06d95d
Author: Nate Cole <nc...@hortonworks.com>
Authored: Tue Feb 23 17:09:03 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Tue Feb 23 17:09:03 2016 -0500

----------------------------------------------------------------------
 .../ambari/server/agent/HeartbeatProcessor.java | 89 ++++++--------------
 .../server/agent/HeartbeatTestHelper.java       | 42 ++++-----
 2 files changed, 48 insertions(+), 83 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e5d580f2/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatProcessor.java
index 2188a77..5e27a38 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatProcessor.java
@@ -18,14 +18,18 @@
 package org.apache.ambari.server.agent;
 
 
-import com.google.common.util.concurrent.AbstractScheduledService;
-import com.google.common.util.concurrent.AbstractService;
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
-import com.google.gson.Gson;
-import com.google.gson.JsonSyntaxException;
-import com.google.gson.annotations.SerializedName;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.TimeUnit;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.Role;
 import org.apache.ambari.server.RoleCommand;
@@ -40,7 +44,7 @@ import org.apache.ambari.server.controller.MaintenanceStateHelper;
 import org.apache.ambari.server.events.ActionFinalReportReceivedEvent;
 import org.apache.ambari.server.events.AlertEvent;
 import org.apache.ambari.server.events.AlertReceivedEvent;
-import org.apache.ambari.server.events.HostComponentVersionEvent;
+import org.apache.ambari.server.events.HostComponentVersionAdvertisedEvent;
 import org.apache.ambari.server.events.publishers.AlertEventPublisher;
 import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
 import org.apache.ambari.server.events.publishers.VersionEventPublisher;
@@ -58,7 +62,6 @@ import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceComponentHost;
 import org.apache.ambari.server.state.StackId;
-import org.apache.ambari.server.state.State;
 import org.apache.ambari.server.state.UpgradeState;
 import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
 import org.apache.ambari.server.state.scheduler.RequestExecution;
@@ -73,17 +76,13 @@ import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentLinkedQueue;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.ThreadFactory;
-import java.util.concurrent.TimeUnit;
+import com.google.common.util.concurrent.AbstractService;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import com.google.gson.Gson;
+import com.google.gson.JsonSyntaxException;
+import com.google.gson.annotations.SerializedName;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
 
 /**
  * HeartbeatProcessor class is used for bulk processing data retrieved from agents in background
@@ -479,10 +478,9 @@ public class HeartbeatProcessor extends AbstractService{
 
               String newVersion = structuredOutput == null ? null : structuredOutput.version;
 
-              // Pass true to always publish a version event.  It is safer to recalculate the version even if we don't
-              // detect a difference in the value.  This is useful in case that a manual database edit is done while
-              // ambari-server is stopped.
-              handleComponentVersionReceived(cl, scHost, newVersion, true);
+              HostComponentVersionAdvertisedEvent event = new HostComponentVersionAdvertisedEvent(cl, scHost, newVersion);
+
+              versionEventPublisher.publish(event);
             }
 
             // Updating stack version, if needed (this is not actually for express/rolling upgrades!)
@@ -535,7 +533,7 @@ public class HeartbeatProcessor extends AbstractService{
               try {
                 ComponentVersionStructuredOut structuredOutput = gson.fromJson(report.getStructuredOut(), ComponentVersionStructuredOut.class);
 
-                if (null != structuredOutput.upgradeDirection && structuredOutput.upgradeDirection.isUpgrade()) {
+                if (null != structuredOutput.upgradeDirection) {
                   scHost.setUpgradeState(UpgradeState.FAILED);
                 }
               } catch (JsonSyntaxException ex) {
@@ -648,7 +646,8 @@ public class HeartbeatProcessor extends AbstractService{
                   if (extra.containsKey("version")) {
                     String version = extra.get("version").toString();
 
-                    handleComponentVersionReceived(cl, scHost, version, false);
+                    HostComponentVersionAdvertisedEvent event = new HostComponentVersionAdvertisedEvent(cl, scHost, version);
+                    versionEventPublisher.publish(event);
                   }
 
                 } catch (Exception e) {
@@ -703,42 +702,6 @@ public class HeartbeatProcessor extends AbstractService{
     }
   }
 
-
-
-  /**
-   * Updates the version of the given service component, sets the upgrade state (if needed)
-   * and publishes a version event through the version event publisher.
-   *
-   * @param cluster        the cluster
-   * @param scHost         service component host
-   * @param newVersion     new version of service component
-   * @param alwaysPublish  if true, always publish a version event; if false,
-   *                       only publish if the component version was updated
-   */
-  private void handleComponentVersionReceived(Cluster cluster, ServiceComponentHost scHost,
-                                              String newVersion, boolean alwaysPublish) {
-
-    boolean updated = false;
-
-    if (StringUtils.isNotBlank(newVersion)) {
-      final String previousVersion = scHost.getVersion();
-      if (!StringUtils.equals(previousVersion, newVersion)) {
-        scHost.setVersion(newVersion);
-        scHost.setStackVersion(cluster.getDesiredStackVersion());
-        if (previousVersion != null && !previousVersion.equalsIgnoreCase(
-            org.apache.ambari.server.state.State.UNKNOWN.toString())) {
-          scHost.setUpgradeState(UpgradeState.COMPLETE);
-        }
-        updated = true;
-      }
-    }
-
-    if (updated || alwaysPublish) {
-      HostComponentVersionEvent event = new HostComponentVersionEvent(cluster, scHost);
-      versionEventPublisher.publish(event);
-    }
-  }
-
   /**
    * This class is used for mapping json of structured output for keytab distribution actions.
    */
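
The hunk above removes handleComponentVersionReceived and has the heartbeat path publish a HostComponentVersionAdvertisedEvent instead, pushing the version and upgrade-state bookkeeping behind versionEventPublisher. Below is a rough sketch of that publish/subscribe shape, written against Guava's EventBus purely for illustration; Ambari's VersionEventPublisher is its own class, and every name in the sketch other than the event concept is a hypothetical stand-in.

import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;

/** Hypothetical sketch of the publish/subscribe split; not Ambari's actual publisher or listener. */
public class VersionEventSketch {

    /** Plain value object standing in for HostComponentVersionAdvertisedEvent. */
    static class VersionAdvertisedEvent {
        final String component;
        final String version;
        VersionAdvertisedEvent(String component, String version) {
            this.component = component;
            this.version = version;
        }
    }

    /** Listener owns the bookkeeping that used to live inline in the heartbeat handler. */
    static class VersionListener {
        @Subscribe
        public void onVersionAdvertised(VersionAdvertisedEvent event) {
            // e.g. persist the new version and recompute upgrade state here
            System.out.println(event.component + " advertised version " + event.version);
        }
    }

    public static void main(String[] args) {
        EventBus bus = new EventBus("version-events"); // stand-in for VersionEventPublisher
        bus.register(new VersionListener());

        // the heartbeat path now only constructs and publishes the event
        bus.post(new VersionAdvertisedEvent("NAMENODE", "2.1.1.1"));
    }
}

The design choice is the usual one for event buses: the heartbeat handler stays thin, and the listener that maintains version and upgrade state can be changed or tested in isolation.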

http://git-wip-us.apache.org/repos/asf/ambari/blob/e5d580f2/ambari-server/src/test/java/org/apache/ambari/server/agent/HeartbeatTestHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/HeartbeatTestHelper.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/HeartbeatTestHelper.java
index 02974ca..a5a3cb5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/HeartbeatTestHelper.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/HeartbeatTestHelper.java
@@ -17,11 +17,22 @@
  */
 package org.apache.ambari.server.agent;
 
-import com.google.inject.Inject;
-import com.google.inject.Injector;
-import com.google.inject.Singleton;
-import com.google.inject.persist.UnitOfWork;
-import junit.framework.Assert;
+import static org.apache.ambari.server.agent.DummyHeartbeatConstants.DummyCluster;
+import static org.apache.ambari.server.agent.DummyHeartbeatConstants.DummyHostname1;
+import static org.apache.ambari.server.agent.DummyHeartbeatConstants.DummyOSRelease;
+import static org.apache.ambari.server.agent.DummyHeartbeatConstants.DummyOs;
+import static org.apache.ambari.server.agent.DummyHeartbeatConstants.DummyStackId;
+import static org.apache.ambari.server.agent.DummyHeartbeatConstants.HBASE;
+import static org.easymock.EasyMock.createMockBuilder;
+import static org.easymock.EasyMock.createNiceMock;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.Role;
 import org.apache.ambari.server.RoleCommand;
@@ -55,21 +66,12 @@ import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStartEvent;
 
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import com.google.inject.Singleton;
+import com.google.inject.persist.UnitOfWork;
 
-import static org.apache.ambari.server.agent.DummyHeartbeatConstants.DummyCluster;
-import static org.apache.ambari.server.agent.DummyHeartbeatConstants.DummyHostname1;
-import static org.apache.ambari.server.agent.DummyHeartbeatConstants.DummyOSRelease;
-import static org.apache.ambari.server.agent.DummyHeartbeatConstants.DummyOs;
-import static org.apache.ambari.server.agent.DummyHeartbeatConstants.DummyStackId;
-import static org.apache.ambari.server.agent.DummyHeartbeatConstants.HBASE;
-import static org.easymock.EasyMock.createMockBuilder;
-import static org.easymock.EasyMock.createNiceMock;
+import junit.framework.Assert;
 
 @Singleton
 public class HeartbeatTestHelper {
@@ -184,7 +186,7 @@ public class HeartbeatTestHelper {
     cluster.setCurrentStackVersion(stackId);
     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
     cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-        RepositoryVersionState.UPGRADING);
+        RepositoryVersionState.INSTALLING);
 
     Set<String> hostNames = new HashSet<String>(){{
       add(DummyHostname1);


[39/50] [abbrv] ambari git commit: Revert "Merge with trunk"

Posted by nc...@apache.org.
Revert "Merge with trunk"

This reverts commit dea22be16c172ed0ca7a6e66ac29bda04027ca2f.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4d3839c7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4d3839c7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4d3839c7

Branch: refs/heads/trunk
Commit: 4d3839c7f9ce921e92de82bc66440167cb6173a9
Parents: dea22be
Author: Nate Cole <nc...@hortonworks.com>
Authored: Mon Feb 22 11:24:56 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Mon Feb 22 11:24:56 2016 -0500

----------------------------------------------------------------------
 .../python/resource_management/core/logger.py   |   2 +-
 .../timeline/AbstractTimelineMetricsSink.java   |  53 +-----
 .../ApplicationHistoryServer.java               |   6 +-
 .../loadsimulator/net/RestMetricsSender.java    |   4 +-
 .../ApplicationHistoryStoreTestUtils.java       |   2 +-
 .../TestApplicationHistoryClientService.java    |   6 +-
 .../TestFileSystemApplicationHistoryStore.java  |  12 +-
 .../TestMemoryApplicationHistoryStore.java      |  12 +-
 .../webapp/TestAHSWebServices.java              |   2 +-
 .../server/configuration/Configuration.java     |  89 ---------
 .../server/controller/ControllerModule.java     |  16 --
 .../controller/ServiceComponentRequest.java     |  35 +---
 .../controller/ServiceComponentResponse.java    |  22 +--
 .../internal/ComponentResourceProvider.java     |  38 ----
 .../server/orm/dao/HostRoleCommandDAO.java      | 166 +++-------------
 .../orm/entities/HostRoleCommandEntity.java     |   7 +-
 .../ServiceComponentDesiredStateEntity.java     |  11 --
 .../serveraction/ServerActionExecutor.java      |  13 +-
 .../ambari/server/state/ServiceComponent.java   |  14 --
 .../server/state/ServiceComponentImpl.java      |  80 +-------
 .../server/state/cluster/ClusterImpl.java       |  36 ++--
 .../services/AlertNoticeDispatchService.java    |  17 +-
 .../server/upgrade/UpgradeCatalog222.java       |   5 -
 .../server/upgrade/UpgradeCatalog240.java       |  14 --
 .../main/resources/Ambari-DDL-Derby-CREATE.sql  |   2 -
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   3 -
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |   3 -
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |   5 -
 .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql     |   3 -
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |   3 -
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   |   5 +-
 .../src/main/resources/alert-templates.xml      |  20 +-
 .../0.8.1.2.2/configuration/kafka-broker.xml    |   1 +
 .../src/main/resources/properties.json          |   1 -
 .../main/resources/scripts/Ambaripreupload.py   |  41 ++--
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |   5 +-
 .../stacks/HDP/2.2/services/stack_advisor.py    |  17 +-
 .../stacks/HDP/2.3/services/stack_advisor.py    |  32 ++--
 .../stacks/HDP/2.4/services/HIVE/metainfo.xml   |  41 +---
 .../actionmanager/TestActionScheduler.java      |  24 +--
 .../ambari/server/agent/AgentResourceTest.java  |   2 -
 .../server/configuration/ConfigurationTest.java |  95 ----------
 .../AmbariManagementControllerTest.java         |   3 -
 .../server/controller/KerberosHelperTest.java   |   2 -
 .../internal/ComponentResourceProviderTest.java |  37 ++--
 .../ambari/server/stack/StackManagerTest.java   |  35 ++--
 .../ambari/server/state/ConfigHelperTest.java   |   2 -
 .../server/upgrade/UpgradeCatalog222Test.java   |  55 +-----
 .../server/upgrade/UpgradeCatalog240Test.java   |  12 --
 .../ambari/server/utils/StageUtilsTest.java     |   2 -
 .../stacks/2.0.6/common/test_stack_advisor.py   |  12 +-
 .../stacks/2.2/common/test_stack_advisor.py     |  14 +-
 .../stacks/2.3/common/test_stack_advisor.py     |  46 -----
 ambari-web/app/assets/test/tests.js             |   1 -
 .../hawq/addStandby/step3_controller.js         |   2 +-
 .../app/mappers/components_state_mapper.js      |   5 -
 ambari-web/app/messages.js                      |   5 -
 .../app/models/alerts/alert_definition.js       |   4 +-
 ambari-web/app/views.js                         |   1 -
 .../configs/widgets/list_config_widget_view.js  |  11 +-
 ambari-web/app/views/main/dashboard/widgets.js  |  19 +-
 .../main/dashboard/widgets/hawqsegment_live.js  | 190 -------------------
 .../dashboard/widgets/hawqsegment_live_test.js  |  69 -------
 63 files changed, 222 insertions(+), 1270 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-common/src/main/python/resource_management/core/logger.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/logger.py b/ambari-common/src/main/python/resource_management/core/logger.py
index 5bbd35b..fd05b02 100644
--- a/ambari-common/src/main/python/resource_management/core/logger.py
+++ b/ambari-common/src/main/python/resource_management/core/logger.py
@@ -173,4 +173,4 @@ class Logger:
     if arguments_str:
       arguments_str = arguments_str[:-2]
         
-    return unicode("{0} {{{1}}}", 'UTF-8').format(name, arguments_str)
\ No newline at end of file
+    return unicode("{0} {{{1}}}").format(name, arguments_str)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
index b2810b7..2854898 100644
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
+++ b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
@@ -28,12 +28,9 @@ import javax.net.ssl.HttpsURLConnection;
 import javax.net.ssl.SSLContext;
 import javax.net.ssl.SSLSocketFactory;
 import javax.net.ssl.TrustManagerFactory;
-import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.net.HttpURLConnection;
 import java.net.URL;
@@ -75,19 +72,17 @@ public abstract class AbstractTimelineMetricsSink {
   protected void emitMetrics(TimelineMetrics metrics) {
     String connectUrl = getCollectorUri();
     int timeout = getTimeoutSeconds() * 1000;
-    HttpURLConnection connection = null;
     try {
       if (connectUrl == null) {
         throw new IOException("Unknown URL. " +
           "Unable to connect to metrics collector.");
       }
       String jsonData = mapper.writeValueAsString(metrics);
-      connection = connectUrl.startsWith("https") ?
+      HttpURLConnection connection = connectUrl.startsWith("https") ?
         getSSLConnection(connectUrl) : getConnection(connectUrl);
 
       connection.setRequestMethod("POST");
       connection.setRequestProperty("Content-Type", "application/json");
-      connection.setRequestProperty("Connection", "Keep-Alive");
       connection.setConnectTimeout(timeout);
       connection.setReadTimeout(timeout);
       connection.setDoOutput(true);
@@ -108,52 +103,14 @@ public abstract class AbstractTimelineMetricsSink {
           LOG.debug("Metrics posted to Collector " + connectUrl);
         }
       }
-      cleanupInputStream(connection.getInputStream());
-    } catch (IOException ioe) {
-      StringBuilder errorMessage =
-        new StringBuilder("Unable to connect to collector, " + connectUrl + "\n");
-      try {
-        if ((connection != null)) {
-          errorMessage.append(cleanupInputStream(connection.getErrorStream()));
-        }
-      } catch (IOException e) {
-        //NOP
-      }
+    } catch (IOException e) {
       if (LOG.isDebugEnabled()) {
-        LOG.debug(errorMessage, ioe);
+        LOG.debug("Unable to connect to collector, " + connectUrl, e);
       } else {
-        LOG.info(errorMessage);
-      }
-      throw new UnableToConnectException(ioe).setConnectUrl(connectUrl);
-    }
-  }
-
-  /**
-   * Cleans up and closes an input stream
-   * see http://docs.oracle.com/javase/6/docs/technotes/guides/net/http-keepalive.html
-   * @param is the InputStream to clean up
-   * @return string read from the InputStream
-   * @throws IOException
-   */
-  private String cleanupInputStream(InputStream is) throws IOException {
-    StringBuilder sb = new StringBuilder();
-    if (is != null) {
-      try (
-        InputStreamReader isr = new InputStreamReader(is);
-        BufferedReader br = new BufferedReader(isr)
-      ) {
-        // read the response body
-        String line;
-        while ((line = br.readLine()) != null) {
-          if (LOG.isDebugEnabled()) {
-            sb.append(line);
-          }
-        }
-      } finally {
-        is.close();
+        LOG.info("Unable to connect to collector, " + connectUrl);
       }
+      throw new UnableToConnectException(e).setConnectUrl(connectUrl);
     }
-    return sb.toString();
   }
 
   // Get a connection

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
index 1ca9c33..62a8cc3 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
@@ -55,8 +55,8 @@ import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.ti
 public class ApplicationHistoryServer extends CompositeService {
 
   public static final int SHUTDOWN_HOOK_PRIORITY = 30;
-  private static final Log LOG =
-    LogFactory.getLog(ApplicationHistoryServer.class);
+  private static final Log LOG = LogFactory
+    .getLog(ApplicationHistoryServer.class);
 
   ApplicationHistoryClientService ahsClientService;
   ApplicationHistoryManager historyManager;
@@ -172,8 +172,6 @@ public class ApplicationHistoryServer extends CompositeService {
     LOG.info("Instantiating AHSWebApp at " + bindAddress);
     try {
       Configuration conf = metricConfiguration.getMetricsConf();
-      conf.set("hadoop.http.max.threads", String.valueOf(metricConfiguration
-        .getTimelineMetricsServiceHandlerThreadCount()));
       HttpConfig.Policy policy = HttpConfig.Policy.valueOf(
         conf.get(TimelineMetricConfiguration.TIMELINE_SERVICE_HTTP_POLICY,
           HttpConfig.Policy.HTTP_ONLY.name()));

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
index 32af851..0a9a513 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
@@ -24,7 +24,6 @@ import org.slf4j.LoggerFactory;
 import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.ProtocolException;
-import java.util.concurrent.TimeUnit;
 
 /**
  * Implements MetricsSender and provides a way of pushing metrics to application metrics history service using REST
@@ -66,8 +65,7 @@ public class RestMetricsSender implements MetricsSender {
       responseString = svc.send(payload);
 
       timer.stop();
-      LOG.info("http response time: " + timer.elapsed(TimeUnit.MILLISECONDS)
-        + " ms");
+      LOG.info("http response time: " + timer.elapsedMillis() + " ms");
 
       if (responseString.length() > 0) {
         LOG.debug("POST response from server: " + responseString);

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
index ec9b49d..c41b8a7 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
@@ -58,7 +58,7 @@ public class ApplicationHistoryStoreTestUtils {
       ApplicationAttemptId appAttemptId) throws IOException {
     store.applicationAttemptStarted(ApplicationAttemptStartData.newInstance(
       appAttemptId, appAttemptId.toString(), 0,
-      ContainerId.newContainerId(appAttemptId, 1)));
+      ContainerId.newInstance(appAttemptId, 1)));
   }
 
   protected void writeApplicationAttemptFinishData(

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
index f93ac5e..2fdedb2 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
@@ -168,7 +168,7 @@ public class TestApplicationHistoryClientService extends
     writeApplicationStartData(appId);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
+    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
     writeContainerStartData(containerId);
     writeContainerFinishData(containerId);
     writeApplicationFinishData(appId);
@@ -189,8 +189,8 @@ public class TestApplicationHistoryClientService extends
     writeApplicationStartData(appId);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
-    ContainerId containerId1 = ContainerId.newContainerId(appAttemptId, 2);
+    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
+    ContainerId containerId1 = ContainerId.newInstance(appAttemptId, 2);
     writeContainerStartData(containerId);
     writeContainerFinishData(containerId);
     writeContainerStartData(containerId1);

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
index 543c25b..bc16d36 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
@@ -94,7 +94,7 @@ public class TestFileSystemApplicationHistoryStore extends
         }
         // write container history data
         for (int k = 1; k <= num; ++k) {
-          ContainerId containerId = ContainerId.newContainerId(appAttemptId, k);
+          ContainerId containerId = ContainerId.newInstance(appAttemptId, k);
           writeContainerStartData(containerId);
           if (missingContainer && k == num) {
             continue;
@@ -144,7 +144,7 @@ public class TestFileSystemApplicationHistoryStore extends
         // read container history data
         Assert.assertEquals(num, store.getContainers(appAttemptId).size());
         for (int k = 1; k <= num; ++k) {
-          ContainerId containerId = ContainerId.newContainerId(appAttemptId, k);
+          ContainerId containerId = ContainerId.newInstance(appAttemptId, k);
           ContainerHistoryData containerData = store.getContainer(containerId);
           Assert.assertNotNull(containerData);
           Assert.assertEquals(Priority.newInstance(containerId.getId()),
@@ -159,7 +159,7 @@ public class TestFileSystemApplicationHistoryStore extends
         ContainerHistoryData masterContainer =
             store.getAMContainer(appAttemptId);
         Assert.assertNotNull(masterContainer);
-        Assert.assertEquals(ContainerId.newContainerId(appAttemptId, 1),
+        Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1),
           masterContainer.getContainerId());
       }
     }
@@ -186,7 +186,7 @@ public class TestFileSystemApplicationHistoryStore extends
       Assert.assertTrue(e.getMessage().contains("is not opened"));
     }
     // write container history data
-    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
+    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
     try {
       writeContainerStartData(containerId);
       Assert.fail();
@@ -209,8 +209,8 @@ public class TestFileSystemApplicationHistoryStore extends
     writeApplicationStartData(appId);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    for (int i = 1; i <= 1000; ++i) {
-      ContainerId containerId = ContainerId.newContainerId(appAttemptId, i);
+    for (int i = 1; i <= 100000; ++i) {
+      ContainerId containerId = ContainerId.newInstance(appAttemptId, i);
       writeContainerStartData(containerId);
       writeContainerFinishData(containerId);
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
index b4da01a..fc5c096 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
@@ -137,7 +137,7 @@ public class TestMemoryApplicationHistoryStore extends
     ApplicationId appId = ApplicationId.newInstance(0, 1);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
+    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
     try {
       writeContainerFinishData(containerId);
       Assert.fail();
@@ -149,14 +149,14 @@ public class TestMemoryApplicationHistoryStore extends
     writeApplicationAttemptStartData(appAttemptId);
     int numContainers = 5;
     for (int i = 1; i <= numContainers; ++i) {
-      containerId = ContainerId.newContainerId(appAttemptId, i);
+      containerId = ContainerId.newInstance(appAttemptId, i);
       writeContainerStartData(containerId);
       writeContainerFinishData(containerId);
     }
     Assert
       .assertEquals(numContainers, store.getContainers(appAttemptId).size());
     for (int i = 1; i <= numContainers; ++i) {
-      containerId = ContainerId.newContainerId(appAttemptId, i);
+      containerId = ContainerId.newInstance(appAttemptId, i);
       ContainerHistoryData data = store.getContainer(containerId);
       Assert.assertNotNull(data);
       Assert.assertEquals(Priority.newInstance(containerId.getId()),
@@ -165,11 +165,11 @@ public class TestMemoryApplicationHistoryStore extends
     }
     ContainerHistoryData masterContainer = store.getAMContainer(appAttemptId);
     Assert.assertNotNull(masterContainer);
-    Assert.assertEquals(ContainerId.newContainerId(appAttemptId, 1),
+    Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1),
       masterContainer.getContainerId());
     writeApplicationAttemptFinishData(appAttemptId);
     // Write again
-    containerId = ContainerId.newContainerId(appAttemptId, 1);
+    containerId = ContainerId.newInstance(appAttemptId, 1);
     try {
       writeContainerStartData(containerId);
       Assert.fail();
@@ -195,7 +195,7 @@ public class TestMemoryApplicationHistoryStore extends
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
     for (int i = 1; i <= numContainers; ++i) {
-      ContainerId containerId = ContainerId.newContainerId(appAttemptId, i);
+      ContainerId containerId = ContainerId.newInstance(appAttemptId, i);
       writeContainerStartData(containerId);
       writeContainerFinishData(containerId);
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
index 44b3f65..e78dfcc 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
@@ -269,7 +269,7 @@ public class TestAHSWebServices extends JerseyTest {
     ApplicationId appId = ApplicationId.newInstance(0, 1);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
+    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
     WebResource r = resource();
     ClientResponse response =
         r.path("ws").path("v1").path("applicationhistory").path("apps")

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index 221b83d..eee4b61 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -184,9 +184,6 @@ public class Configuration {
   public static final String LDAP_REFERRAL_KEY = "authentication.ldap.referral";
   public static final String LDAP_PAGINATION_ENABLED_KEY = "authentication.ldap.pagination.enabled";
   public static final String SERVER_EC_CACHE_SIZE = "server.ecCacheSize";
-  public static final String SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED = "server.hrcStatusSummary.cache.enabled";
-  public static final String SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE = "server.hrcStatusSummary.cache.size";
-  public static final String SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION = "server.hrcStatusSummary.cache.expiryDuration";
   public static final String SERVER_STALE_CONFIG_CACHE_ENABLED_KEY = "server.cache.isStale.enabled";
   public static final String SERVER_PERSISTENCE_TYPE_KEY = "server.persistence.type";
   public static final String SERVER_JDBC_USER_NAME_KEY = "server.jdbc.user.name";
@@ -281,9 +278,6 @@ public class Configuration {
   public static final String TEMPORARY_KEYSTORE_ACTIVELY_PURGE = "security.temporary.keystore.actibely.purge";
   public static final boolean TEMPORARY_KEYSTORE_ACTIVELY_PURGE_DEFAULT = true;
 
-  // Alerts notifications properties
-  public static final String AMBARI_DISPLAY_URL = "ambari.display.url";
-
   /**
    * Key for repo validation suffixes.
    */
@@ -370,11 +364,6 @@ public class Configuration {
 
   public static final String CUSTOM_ACTION_DEFINITION_KEY = "custom.action.definitions";
   public static final String SHARED_RESOURCES_DIR_KEY = "shared.resources.dir";
-
-  protected static final boolean SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED_DEFAULT = true;
-  protected static final long SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE_DEFAULT = 10000L;
-  protected static final long SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_DEFAULT = 30; //minutes
-
   private static final String CUSTOM_ACTION_DEFINITION_DEF_VALUE = "/var/lib/ambari-server/resources/custom_action_definitions";
 
   private static final long SERVER_EC_CACHE_SIZE_DEFAULT = 10000L;
@@ -1785,75 +1774,6 @@ public class Configuration {
   }
 
   /**
-   * Caching of host role command status summary can be enabled/disabled
-   * through the {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED} config property.
-   * This method returns the value of {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED}
-   * config property. If this config property is not defined than returns the default defined by {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED_DEFAULT}.
-   * @return true if caching is to be enabled otherwise false.
-   */
-  public boolean getHostRoleCommandStatusSummaryCacheEnabled() {
-    String stringValue = properties.getProperty(SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED);
-    boolean value = SERVER_HRC_STATUS_SUMMARY_CACHE_ENABLED_DEFAULT;
-    if (stringValue != null) {
-      try {
-        value = Boolean.valueOf(stringValue);
-      }
-      catch (NumberFormatException ignored) {
-      }
-
-    }
-
-    return value;
-  }
-
-  /**
-   * In order to avoid the cache storing host role command status summary objects exhaust
-   * memory we set a max record number allowed for the cache. This limit can be configured
-   * through {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE} config property. The method returns
-   * the value of this config property. If this config property is not defined than
-   * the default value specified by {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE_DEFAULT} is returned.
-   * @return the upper limit for the number of cached host role command summaries.
-   */
-  public long getHostRoleCommandStatusSummaryCacheSize() {
-    String stringValue = properties.getProperty(SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE);
-    long value = SERVER_HRC_STATUS_SUMMARY_CACHE_SIZE_DEFAULT;
-    if (stringValue != null) {
-      try {
-        value = Long.valueOf(stringValue);
-      }
-      catch (NumberFormatException ignored) {
-      }
-
-    }
-
-    return value;
-  }
-
-  /**
-   * As a safety measure the cache storing host role command status summaries should auto expire after a while.
-   * The expiry duration is specified through the {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION} config property
-   * expressed in minutes. The method returns the value of this config property. If this config property is not defined than
-   * the default value specified by {@link #SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_DEFAULT}
-   * @return the cache expiry duration in minutes
-   */
-  public long getHostRoleCommandStatusSummaryCacheExpiryDuration() {
-    String stringValue = properties.getProperty(SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION);
-    long value = SERVER_HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_DEFAULT;
-    if (stringValue != null) {
-      try {
-        value = Long.valueOf(stringValue);
-      }
-      catch (NumberFormatException ignored) {
-      }
-
-    }
-
-    return value;
-  }
-
-
-
-  /**
    * @return whether staleConfig's flag is cached.
    */
   public boolean isStaleConfigCacheEnabled() {
@@ -2581,15 +2501,6 @@ public class Configuration {
   }
 
   /**
-   * Get the ambari display URL
-   * @return
-   */
-  public String getAmbariDisplayUrl() {
-    return properties.getProperty(AMBARI_DISPLAY_URL, null);
-  }
-
-
-  /**
    * @return number of retry attempts for api and blueprint operations
    */
   public int getOperationsRetryAttempts() {

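The getters removed above all follow the same pattern used elsewhere in this Configuration class: read the raw string property, fall back to a compile-time default when it is absent, and ignore parse failures. A minimal standalone sketch of that pattern; the key name and default below are placeholders, not real Ambari properties:

import java.util.Properties;

public class TypedPropertyExample {
  private final Properties properties = new Properties();

  // Returns the long value of the given property, or the supplied default
  // when the property is missing or cannot be parsed as a number.
  public long getLongProperty(String key, long defaultValue) {
    String raw = properties.getProperty(key);
    if (raw == null) {
      return defaultValue;
    }
    try {
      return Long.parseLong(raw.trim());
    } catch (NumberFormatException ignored) {
      return defaultValue;
    }
  }
}
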
http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
index daca64d..76ff6db 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
@@ -65,7 +65,6 @@ import org.apache.ambari.server.notifications.NotificationDispatcher;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.DBAccessorImpl;
 import org.apache.ambari.server.orm.PersistenceType;
-import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.scheduler.ExecutionScheduler;
 import org.apache.ambari.server.scheduler.ExecutionSchedulerImpl;
 import org.apache.ambari.server.security.AmbariEntryPoint;
@@ -339,21 +338,6 @@ public class ControllerModule extends AbstractModule {
     bindConstant().annotatedWith(Names.named("executionCommandCacheSize")).
         to(configuration.getExecutionCommandsCacheSize());
 
-
-    // Host role commands status summary max cache enable/disable
-    bindConstant().annotatedWith(Names.named(HostRoleCommandDAO.HRC_STATUS_SUMMARY_CACHE_ENABLED)).
-      to(configuration.getHostRoleCommandStatusSummaryCacheEnabled());
-
-    // Host role commands status summary max cache size
-    bindConstant().annotatedWith(Names.named(HostRoleCommandDAO.HRC_STATUS_SUMMARY_CACHE_SIZE)).
-      to(configuration.getHostRoleCommandStatusSummaryCacheSize());
-    // Host role command status summary cache expiry duration in minutes
-    bindConstant().annotatedWith(Names.named(HostRoleCommandDAO.HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_MINUTES)).
-      to(configuration.getHostRoleCommandStatusSummaryCacheExpiryDuration());
-
-
-
-
     bind(AmbariManagementController.class).to(
       AmbariManagementControllerImpl.class);
     bind(AbstractRootServiceResponseFactory.class).to(RootServiceResponseFactory.class);
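
For reference on the wiring style seen in ControllerModule above: Guice's bindConstant().annotatedWith(Names.named(...)) pairs with an @Inject constructor parameter carrying the matching @Named annotation, which is how the bindConstant(...) lines removed here connected to the @Named constructor parameters on HostRoleCommandDAO later in this diff. A minimal sketch under that assumption; the names and value below are illustrative only:

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.name.Named;
import com.google.inject.name.Names;

public class NamedConstantExample {
  static class CacheConfigModule extends AbstractModule {
    @Override
    protected void configure() {
      // Constant bindings are matched to injection points by the @Named key.
      bindConstant().annotatedWith(Names.named("cacheSize")).to(10000L);
    }
  }

  static class CacheHolder {
    final long cacheSize;

    @Inject
    CacheHolder(@Named("cacheSize") long cacheSize) {
      this.cacheSize = cacheSize;
    }
  }

  public static void main(String[] args) {
    Injector injector = Guice.createInjector(new CacheConfigModule());
    System.out.println(injector.getInstance(CacheHolder.class).cacheSize); // prints 10000
  }
}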

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentRequest.java
index ba0b84f..78b9897 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentRequest.java
@@ -31,28 +31,21 @@ public class ServiceComponentRequest {
 
   private String componentCategory;
 
-  private String recoveryEnabled; // CREATE/UPDATE
-
   public ServiceComponentRequest(String clusterName, String serviceName,
                                  String componentName, String desiredState) {
-    this(clusterName, serviceName, componentName, desiredState, null, null);
-  }
-
-  public ServiceComponentRequest(String clusterName, String serviceName,
-                                 String componentName, String desiredState,
-                                 String recoveryEnabled) {
-    this(clusterName, serviceName, componentName, desiredState, recoveryEnabled, null);
+    this.clusterName = clusterName;
+    this.serviceName = serviceName;
+    this.componentName = componentName;
+    this.desiredState = desiredState;
   }
 
   public ServiceComponentRequest(String clusterName,
                                  String serviceName, String componentName,
-                                 String desiredState, String recoveryEnabled,
-                                 String componentCategory) {
+                                 String desiredState, String componentCategory) {
     this.clusterName = clusterName;
     this.serviceName = serviceName;
     this.componentName = componentName;
     this.desiredState = desiredState;
-    this.recoveryEnabled = recoveryEnabled;
     this.componentCategory = componentCategory;
   }
 
@@ -112,20 +105,6 @@ public class ServiceComponentRequest {
     this.clusterName = clusterName;
   }
 
-  /**
-   * @return recoveryEnabled
-   */
-  public String getRecoveryEnabled() {
-    return recoveryEnabled;
-  }
-
-  /**
-   * @param recoveryEnabled the recoveryEnabled value to set.
-   */
-  public void setRecoveryEnabled(String recoveryEnabled) {
-    this.recoveryEnabled = recoveryEnabled;
-  }
-
   public String getComponentCategory() {
     return componentCategory;
   }
@@ -136,7 +115,7 @@ public class ServiceComponentRequest {
 
   @Override
   public String toString() {
-    return String.format("[clusterName=%s, serviceName=%s, componentName=%s, desiredState=%s, recoveryEnabled=%s, componentCategory=%s]",
-        clusterName, serviceName, clusterName, desiredState, recoveryEnabled, componentCategory);
+    return String.format("[clusterName=%s, serviceName=%s, componentName=%s, desiredState=%s, componentCategory=%s]",
+        clusterName, serviceName, clusterName, desiredState, componentCategory);
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentResponse.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentResponse.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentResponse.java
index 381b114..f7dd301 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentResponse.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentResponse.java
@@ -41,8 +41,6 @@ public class ServiceComponentResponse {
 
   private int installedCount;
 
-  private boolean recoveryEnabled;
-
   public ServiceComponentResponse(Long clusterId, String clusterName,
                                   String serviceName,
                                   String componentName,
@@ -50,8 +48,7 @@ public class ServiceComponentResponse {
                                   String desiredState,
                                   int totalCount,
                                   int startedCount,
-                                  int installedCount,
-                                  boolean recoveryEnabled) {
+                                  int installedCount) {
     super();
     this.clusterId = clusterId;
     this.clusterName = clusterName;
@@ -62,7 +59,6 @@ public class ServiceComponentResponse {
     this.totalCount = totalCount;
     this.startedCount = startedCount;
     this.installedCount = installedCount;
-    this.recoveryEnabled = recoveryEnabled;
   }
 
   /**
@@ -215,22 +211,6 @@ public class ServiceComponentResponse {
     this.totalCount = totalCount;
   }
 
-  /**
-   * Get a true or false value indicating if the service component is auto start enabled
-   * @return true or false
-   */
-  public boolean isRecoveryEnabled() {
-    return recoveryEnabled;
-  }
-
-  /**
-   * Set a true or false value indicating whether the service component is auto start enabled
-   * @param recoveryEnabled
-   */
-  public void setRecoveryEnabled(boolean recoveryEnabled) {
-    this.recoveryEnabled = recoveryEnabled;
-  }
-
   @Override
   public boolean equals(Object o) {
     if (this == o) return true;

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
index b339adf..3ad6e64 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
@@ -84,7 +84,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
   protected static final String COMPONENT_TOTAL_COUNT_PROPERTY_ID     = "ServiceComponentInfo/total_count";
   protected static final String COMPONENT_STARTED_COUNT_PROPERTY_ID   = "ServiceComponentInfo/started_count";
   protected static final String COMPONENT_INSTALLED_COUNT_PROPERTY_ID = "ServiceComponentInfo/installed_count";
-  protected static final String COMPONENT_RECOVERY_ENABLED_ID         = "ServiceComponentInfo/recovery_enabled";
 
   private static final String TRUE = "true";
 
@@ -179,7 +178,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
       setResourceProperty(resource, COMPONENT_TOTAL_COUNT_PROPERTY_ID, response.getTotalCount(), requestedIds);
       setResourceProperty(resource, COMPONENT_STARTED_COUNT_PROPERTY_ID, response.getStartedCount(), requestedIds);
       setResourceProperty(resource, COMPONENT_INSTALLED_COUNT_PROPERTY_ID, response.getInstalledCount(), requestedIds);
-      setResourceProperty(resource, COMPONENT_RECOVERY_ENABLED_ID, String.valueOf(response.isRecoveryEnabled()), requestedIds);
 
       resources.add(resource);
     }
@@ -253,7 +251,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
         (String) properties.get(COMPONENT_SERVICE_NAME_PROPERTY_ID),
         (String) properties.get(COMPONENT_COMPONENT_NAME_PROPERTY_ID),
         (String) properties.get(COMPONENT_STATE_PROPERTY_ID),
-        (String) properties.get(COMPONENT_RECOVERY_ENABLED_ID),
         (String) properties.get(COMPONENT_CATEGORY_PROPERTY_ID));
   }
 
@@ -466,9 +463,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
     Map<String, Map<String, Set<String>>> componentNames = new HashMap<>();
     Set<State> seenNewStates = new HashSet<>();
 
-    Collection<ServiceComponent> recoveryEnabledComponents = new ArrayList<>();
-    Collection<ServiceComponent> recoveryDisabledComponents = new ArrayList<>();
-
     // Determine operation level
     Resource.Type reqOpLvl;
     if (requestProperties.containsKey(RequestOperationLevel.OPERATION_LEVEL_ID)) {
@@ -519,20 +513,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
         continue;
       }
 
-      // Gather the components affected by the change in
-      // auto start state
-      if (!StringUtils.isEmpty(request.getRecoveryEnabled())) {
-        boolean newRecoveryEnabled = Boolean.parseBoolean(request.getRecoveryEnabled());
-        boolean oldRecoveryEnabled = sc.isRecoveryEnabled();
-        if (newRecoveryEnabled != oldRecoveryEnabled) {
-          if (newRecoveryEnabled) {
-            recoveryEnabledComponents.add(sc);
-          } else {
-            recoveryDisabledComponents.add(sc);
-          }
-        }
-      }
-
       if (newState == null) {
         debug("Nothing to do for new updateServiceComponent request, request ={}, newDesiredState=null" + request);
         continue;
@@ -559,11 +539,9 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
               + ", clusterId=" + cluster.getClusterId()
               + ", serviceName=" + sc.getServiceName()
               + ", componentName=" + sc.getName()
-              + ", recoveryEnabled=" + sc.isRecoveryEnabled()
               + ", currentDesiredState=" + oldScState
               + ", newDesiredState=" + newState);
         }
-
         if (!changedComps.containsKey(newState)) {
           changedComps.put(newState, new ArrayList<ServiceComponent>());
         }
@@ -571,7 +549,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
               + ", clusterName=" + clusterName
               + ", serviceName=" + serviceName
               + ", componentName=" + sc.getName()
-              + ", recoveryEnabled=" + sc.isRecoveryEnabled()
               + ", currentDesiredState=" + oldScState
               + ", newDesiredState=" + newState);
 
@@ -585,7 +562,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
                 + ", clusterName=" + clusterName
                 + ", serviceName=" + serviceName
                 + ", componentName=" + sc.getName()
-                + ", recoveryEnabled=" + sc.isRecoveryEnabled()
                 + ", hostname=" + sch.getHostName()
                 + ", currentState=" + oldSchState
                 + ", newDesiredState=" + newState);
@@ -598,7 +574,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
                 + ", clusterName=" + clusterName
                 + ", serviceName=" + serviceName
                 + ", componentName=" + sc.getName()
-                + ", recoveryEnabled=" + sc.isRecoveryEnabled()
                 + ", hostname=" + sch.getHostName()
                 + ", currentState=" + oldSchState
                 + ", newDesiredState=" + newState);
@@ -612,7 +587,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
                 + ", clusterName=" + clusterName
                 + ", serviceName=" + serviceName
                 + ", componentName=" + sc.getName()
-                + ", recoveryEnabled=" + sc.isRecoveryEnabled()
                 + ", hostname=" + sch.getHostName());
 
           continue;
@@ -626,7 +600,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
               + ", clusterId=" + cluster.getClusterId()
               + ", serviceName=" + sch.getServiceName()
               + ", componentName=" + sch.getServiceComponentName()
-              + ", recoveryEnabled=" + sc.isRecoveryEnabled()
               + ", hostname=" + sch.getHostName()
               + ", currentState=" + oldSchState
               + ", newDesiredState=" + newState);
@@ -642,7 +615,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
               + ", clusterName=" + clusterName
               + ", serviceName=" + serviceName
               + ", componentName=" + sc.getName()
-              + ", recoveryEnabled=" + sc.isRecoveryEnabled()
               + ", hostname=" + sch.getHostName()
               + ", currentState=" + oldSchState
               + ", newDesiredState=" + newState);
@@ -656,16 +628,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
 
     // TODO additional validation?
 
-    // Validations completed. Update the affected service components now.
-
-    for (ServiceComponent sc : recoveryEnabledComponents) {
-      sc.setRecoveryEnabled(true);
-    }
-
-    for (ServiceComponent sc : recoveryDisabledComponents) {
-      sc.setRecoveryEnabled(false);
-    }
-
     Cluster cluster = clusters.getCluster(clusterNames.iterator().next());
 
     return getManagementController().createAndPersistStages(cluster, requestProperties, null, null, changedComps, changedScHosts,

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java
index deca9b1..4fd03e5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java
@@ -18,6 +18,9 @@
 
 package org.apache.ambari.server.orm.dao;
 
+import static org.apache.ambari.server.orm.DBAccessor.DbType.ORACLE;
+import static org.apache.ambari.server.orm.dao.DaoUtils.ORACLE_LIST_LIMIT;
+
 import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -25,7 +28,6 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.concurrent.TimeUnit;
 
 import javax.persistence.EntityManager;
 import javax.persistence.TypedQuery;
@@ -47,27 +49,16 @@ import org.apache.ambari.server.orm.entities.HostEntity;
 import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
 import org.apache.ambari.server.orm.entities.HostRoleCommandEntity_;
 import org.apache.ambari.server.orm.entities.StageEntity;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.LoadingCache;
 import com.google.common.collect.Lists;
 import com.google.inject.Inject;
 import com.google.inject.Provider;
 import com.google.inject.Singleton;
-import com.google.inject.name.Named;
 import com.google.inject.persist.Transactional;
 
-import static org.apache.ambari.server.orm.DBAccessor.DbType.ORACLE;
-import static org.apache.ambari.server.orm.dao.DaoUtils.ORACLE_LIST_LIMIT;
-
 @Singleton
 public class HostRoleCommandDAO {
 
-  private static final Logger LOG = LoggerFactory.getLogger(HostRoleCommandDAO.class);
-
   private static final String SUMMARY_DTO = String.format(
     "SELECT NEW %s(" +
       "MAX(hrc.stage.skippable), " +
@@ -101,122 +92,12 @@ public class HostRoleCommandDAO {
    */
   private static final String COMPLETED_REQUESTS_SQL = "SELECT DISTINCT task.requestId FROM HostRoleCommandEntity task WHERE task.requestId NOT IN (SELECT task.requestId FROM HostRoleCommandEntity task WHERE task.status IN :notCompletedStatuses) ORDER BY task.requestId {0}";
 
-  /**
-   * A cache that holds {@link HostRoleCommandStatusSummaryDTO} grouped by stage id for requests by request id.
-   * The JPQL computing the host role command status summary for a request is rather expensive
-   * thus this cache helps reducing the load on the database
-   */
-  private final LoadingCache<Long, Map<Long, HostRoleCommandStatusSummaryDTO>> hrcStatusSummaryCache;
-
-  /**
-   * Specifies whether caching for {@link HostRoleCommandStatusSummaryDTO} grouped by stage id for requests
-   * is enabled.
-   */
-  private final boolean hostRoleCommandStatusSummaryCacheEnabled;
-
-
   @Inject
   Provider<EntityManager> entityManagerProvider;
 
   @Inject
   DaoUtils daoUtils;
 
-  public final static String HRC_STATUS_SUMMARY_CACHE_SIZE =  "hostRoleCommandStatusSummaryCacheSize";
-  public final static String HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_MINUTES = "hostRoleCommandStatusCacheExpiryDurationMins";
-  public final static String HRC_STATUS_SUMMARY_CACHE_ENABLED =  "hostRoleCommandStatusSummaryCacheEnabled";
-
-  /**
-   * Invalidates the host role command status summary cache entry that corresponds to the given request.
-   * @param requestId the key of the cache entry to be invalidated.
-   */
-  protected void invalidateHostRoleCommandStatusSummaryCache(Long requestId) {
-    if (!hostRoleCommandStatusSummaryCacheEnabled )
-      return;
-
-    LOG.debug("Invalidating host role command status summary cache for request {} !", requestId);
-    hrcStatusSummaryCache.invalidate(requestId);
-
-  }
-
-  /**
-   * Invalidates those entries in host role command status cache which are dependent on the passed {@link org.apache.ambari.server.orm.entities.HostRoleCommandEntity}
-   * entity.
-   * @param hostRoleCommandEntity
-   */
-  protected void invalidateHostRoleCommandStatusCache(HostRoleCommandEntity hostRoleCommandEntity) {
-    if ( !hostRoleCommandStatusSummaryCacheEnabled )
-      return;
-
-    if (hostRoleCommandEntity != null) {
-      Long requestId = hostRoleCommandEntity.getRequestId();
-      if (requestId == null) {
-        StageEntity stageEntity = hostRoleCommandEntity.getStage();
-        if (stageEntity != null)
-          requestId = stageEntity.getRequestId();
-      }
-
-      if (requestId != null)
-        invalidateHostRoleCommandStatusSummaryCache(requestId.longValue());
-    }
-
-  }
-
-  /**
-   * Loads the counts of tasks for a request and groups them by stage id.
-   * This allows for very efficient loading when there are a huge number of stages
-   * and tasks to iterate (for example, during a Stack Upgrade).
-   * @param requestId the request id
-   * @return the map of stage-to-summary objects
-   */
-  @RequiresSession
-  protected Map<Long, HostRoleCommandStatusSummaryDTO> loadAggregateCounts(Long requestId) {
-
-    TypedQuery<HostRoleCommandStatusSummaryDTO> query = entityManagerProvider.get().createQuery(
-      SUMMARY_DTO, HostRoleCommandStatusSummaryDTO.class);
-
-    query.setParameter("requestId", requestId);
-    query.setParameter("aborted", HostRoleStatus.ABORTED);
-    query.setParameter("completed", HostRoleStatus.COMPLETED);
-    query.setParameter("failed", HostRoleStatus.FAILED);
-    query.setParameter("holding", HostRoleStatus.HOLDING);
-    query.setParameter("holding_failed", HostRoleStatus.HOLDING_FAILED);
-    query.setParameter("holding_timedout", HostRoleStatus.HOLDING_TIMEDOUT);
-    query.setParameter("in_progress", HostRoleStatus.IN_PROGRESS);
-    query.setParameter("pending", HostRoleStatus.PENDING);
-    query.setParameter("queued", HostRoleStatus.QUEUED);
-    query.setParameter("timedout", HostRoleStatus.TIMEDOUT);
-    query.setParameter("skipped_failed", HostRoleStatus.SKIPPED_FAILED);
-
-    Map<Long, HostRoleCommandStatusSummaryDTO> map = new HashMap<Long, HostRoleCommandStatusSummaryDTO>();
-
-    for (HostRoleCommandStatusSummaryDTO dto : daoUtils.selectList(query)) {
-      map.put(dto.getStageId(), dto);
-    }
-
-    return map;
-  }
-
-  @Inject
-  public HostRoleCommandDAO(@Named(HRC_STATUS_SUMMARY_CACHE_ENABLED) boolean hostRoleCommandStatusSummaryCacheEnabled, @Named(HRC_STATUS_SUMMARY_CACHE_SIZE) long hostRoleCommandStatusSummaryCacheLimit, @Named(HRC_STATUS_SUMMARY_CACHE_EXPIRY_DURATION_MINUTES) long hostRoleCommandStatusSummaryCacheExpiryDurationMins) {
-    this.hostRoleCommandStatusSummaryCacheEnabled = hostRoleCommandStatusSummaryCacheEnabled;
-
-    LOG.info("Host role command status summary cache {} !", hostRoleCommandStatusSummaryCacheEnabled ? "enabled" : "disabled");
-
-
-    hrcStatusSummaryCache = CacheBuilder.newBuilder()
-      .maximumSize(hostRoleCommandStatusSummaryCacheLimit)
-      .expireAfterAccess(hostRoleCommandStatusSummaryCacheExpiryDurationMins, TimeUnit.MINUTES)
-      .build(new CacheLoader<Long, Map<Long, HostRoleCommandStatusSummaryDTO>>() {
-        @Override
-        public Map<Long, HostRoleCommandStatusSummaryDTO> load(Long requestId) throws Exception {
-          LOG.debug("Cache miss for host role command status summary object for request {}, fetching from JPA", requestId);
-          Map<Long, HostRoleCommandStatusSummaryDTO> hrcCommandStatusByStageId = loadAggregateCounts(requestId);
-
-          return hrcCommandStatusByStageId;
-        }
-      });
-  }
-
   @RequiresSession
   public HostRoleCommandEntity findByPK(long taskId) {
     return entityManagerProvider.get().find(HostRoleCommandEntity.class, taskId);
@@ -544,16 +425,11 @@ public class HostRoleCommandDAO {
   @Transactional
   public void create(HostRoleCommandEntity stageEntity) {
     entityManagerProvider.get().persist(stageEntity);
-
-    invalidateHostRoleCommandStatusCache(stageEntity);
   }
 
   @Transactional
   public HostRoleCommandEntity merge(HostRoleCommandEntity stageEntity) {
     HostRoleCommandEntity entity = entityManagerProvider.get().merge(stageEntity);
-
-    invalidateHostRoleCommandStatusCache(entity);
-
     return entity;
   }
 
@@ -570,8 +446,6 @@ public class HostRoleCommandDAO {
     List<HostRoleCommandEntity> managedList = new ArrayList<HostRoleCommandEntity>(entities.size());
     for (HostRoleCommandEntity entity : entities) {
       managedList.add(entityManagerProvider.get().merge(entity));
-
-      invalidateHostRoleCommandStatusCache(entity);
     }
     return managedList;
   }
@@ -579,8 +453,6 @@ public class HostRoleCommandDAO {
   @Transactional
   public void remove(HostRoleCommandEntity stageEntity) {
     entityManagerProvider.get().remove(merge(stageEntity));
-
-    invalidateHostRoleCommandStatusCache(stageEntity);
   }
 
   @Transactional
@@ -591,16 +463,38 @@ public class HostRoleCommandDAO {
 
   /**
    * Finds the counts of tasks for a request and groups them by stage id.
+   * This allows for very efficient loading when there are a huge number of stages
+   * and tasks to iterate (for example, during a Stack Upgrade).
    * @param requestId the request id
    * @return the map of stage-to-summary objects
    */
+  @RequiresSession
   public Map<Long, HostRoleCommandStatusSummaryDTO> findAggregateCounts(Long requestId) {
-    if (hostRoleCommandStatusSummaryCacheEnabled)
-      return hrcStatusSummaryCache.getUnchecked(requestId);
-    else
-      return loadAggregateCounts(requestId); // if caching not enabled fall back to fetching through JPA
-  }
 
+    TypedQuery<HostRoleCommandStatusSummaryDTO> query = entityManagerProvider.get().createQuery(
+        SUMMARY_DTO, HostRoleCommandStatusSummaryDTO.class);
+
+    query.setParameter("requestId", requestId);
+    query.setParameter("aborted", HostRoleStatus.ABORTED);
+    query.setParameter("completed", HostRoleStatus.COMPLETED);
+    query.setParameter("failed", HostRoleStatus.FAILED);
+    query.setParameter("holding", HostRoleStatus.HOLDING);
+    query.setParameter("holding_failed", HostRoleStatus.HOLDING_FAILED);
+    query.setParameter("holding_timedout", HostRoleStatus.HOLDING_TIMEDOUT);
+    query.setParameter("in_progress", HostRoleStatus.IN_PROGRESS);
+    query.setParameter("pending", HostRoleStatus.PENDING);
+    query.setParameter("queued", HostRoleStatus.QUEUED);
+    query.setParameter("timedout", HostRoleStatus.TIMEDOUT);
+    query.setParameter("skipped_failed", HostRoleStatus.SKIPPED_FAILED);
+
+    Map<Long, HostRoleCommandStatusSummaryDTO> map = new HashMap<Long, HostRoleCommandStatusSummaryDTO>();
+
+    for (HostRoleCommandStatusSummaryDTO dto : daoUtils.selectList(query)) {
+      map.put(dto.getStageId(), dto);
+    }
+
+    return map;
+  }
 
   /**
    * Updates the {@link HostRoleCommandEntity#isFailureAutoSkipped()} flag for

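The cache dropped from HostRoleCommandDAO in the hunks above is a standard Guava LoadingCache keyed by request id, sized and expired from injected settings. A minimal sketch of that construction pattern, with a simplified String value type and made-up limits standing in for the real summary DTO map:

import java.util.concurrent.TimeUnit;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;

public class SummaryCacheExample {
  // Per-request summaries are loaded on demand and expire after a period of
  // no access, so stale entries do not accumulate without bound.
  private final LoadingCache<Long, String> summaryCache = CacheBuilder.newBuilder()
      .maximumSize(10000L)
      .expireAfterAccess(30, TimeUnit.MINUTES)
      .build(new CacheLoader<Long, String>() {
        @Override
        public String load(Long requestId) {
          // Stand-in for the expensive aggregation query.
          return "summary-for-" + requestId;
        }
      });

  public String getSummary(long requestId) {
    // getUnchecked is safe here because load() throws no checked exceptions.
    return summaryCache.getUnchecked(requestId);
  }

  public void invalidate(long requestId) {
    summaryCache.invalidate(requestId);
  }
}
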
http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
index 1674175..af71c40 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
@@ -32,7 +32,6 @@ import javax.persistence.FetchType;
 import javax.persistence.GeneratedValue;
 import javax.persistence.GenerationType;
 import javax.persistence.Id;
-import javax.persistence.Index;
 import javax.persistence.JoinColumn;
 import javax.persistence.JoinColumns;
 import javax.persistence.Lob;
@@ -49,11 +48,7 @@ import org.apache.ambari.server.actionmanager.HostRoleStatus;
 import org.apache.commons.lang.ArrayUtils;
 
 @Entity
-@Table(name = "host_role_command"
-       , indexes = {
-           @Index(name = "idx_hrc_request_id", columnList = "request_id")
-         , @Index(name = "idx_hrc_status_role", columnList = "status, role")
-       })
+@Table(name = "host_role_command")
 @TableGenerator(name = "host_role_command_id_generator",
     table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
     , pkColumnValue = "host_role_command_id_seq"

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
index 519e4e6..65cc107 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
@@ -81,9 +81,6 @@ public class ServiceComponentDesiredStateEntity {
   @Enumerated(EnumType.STRING)
   private State desiredState = State.INIT;
 
-  @Column(name = "recovery_enabled", nullable = false, insertable = true, updatable = true)
-  private Integer recoveryEnabled = 0;
-
   /**
    * Unidirectional one-to-one association to {@link StackEntity}
    */
@@ -183,14 +180,6 @@ public class ServiceComponentDesiredStateEntity {
     return serviceComponentHistory;
   }
 
-  public boolean isRecoveryEnabled() {
-    return recoveryEnabled != 0;
-  }
-
-  public void setRecoveryEnabled(boolean recoveryEnabled) {
-    this.recoveryEnabled = (recoveryEnabled == false) ? 0 : 1;
-  }
-
   @Override
   public boolean equals(Object o) {
     if (this == o) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/serveraction/ServerActionExecutor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/ServerActionExecutor.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/ServerActionExecutor.java
index f93cf43..20cf5bb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/ServerActionExecutor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/ServerActionExecutor.java
@@ -392,8 +392,17 @@ public class ServerActionExecutor {
    * @throws InterruptedException
    */
   public void doWork() throws InterruptedException {
-    List<HostRoleCommand> tasks = db.getTasksByRoleAndStatus(Role.AMBARI_SERVER_ACTION.name(),
-      HostRoleStatus.QUEUED);
+    List<HostRoleCommand> tasks = db.getTasksByHostRoleAndStatus(serverHostName,
+        Role.AMBARI_SERVER_ACTION.toString(), HostRoleStatus.QUEUED);
+
+    if (null == tasks || tasks.isEmpty()) {
+      // !!! if the server is not a part of the cluster,
+      // !!! just look for anything designated AMBARI_SERVER_ACTION.
+      // !!! do we even need to worry about servername in the first place?  We're
+      // !!! _on_ the ambari server!
+      tasks = db.getTasksByRoleAndStatus(Role.AMBARI_SERVER_ACTION.name(),
+          HostRoleStatus.QUEUED);
+    }
 
     if ((tasks != null) && !tasks.isEmpty()) {
       for (HostRoleCommand task : tasks) {

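The doWork() change above queries tasks queued for this server's hostname first and only falls back to a host-agnostic lookup when that returns nothing. A generic sketch of that narrow-then-broad pattern; the TaskStore interface below is hypothetical, not an Ambari API:

import java.util.Collections;
import java.util.List;

public class FallbackLookupExample {
  interface TaskStore {
    List<String> findQueuedTasksForHost(String hostName);
    List<String> findQueuedTasks();
  }

  // Prefer host-scoped results; fall back to the unscoped query when the
  // host-scoped lookup returns null or nothing.
  static List<String> queuedTasks(TaskStore store, String hostName) {
    List<String> tasks = store.findQueuedTasksForHost(hostName);
    if (tasks == null || tasks.isEmpty()) {
      tasks = store.findQueuedTasks();
    }
    return tasks == null ? Collections.<String>emptyList() : tasks;
  }
}
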
http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponent.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponent.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponent.java
index dcb7cf6..7803045 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponent.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponent.java
@@ -28,20 +28,6 @@ public interface ServiceComponent {
 
   String getName();
 
-  /**
-   * Get a true or false value specifying
-   * if auto start was enabled for this component.
-   * @return true or false
-   */
-  boolean isRecoveryEnabled();
-
-  /**
-   * Set a true or false value specifying if this
-   * component is to be enabled for auto start or not.
-   * @param recoveryEnabled - true or false
-   */
-  void setRecoveryEnabled(boolean recoveryEnabled);
-
   String getServiceName();
 
   long getClusterId();

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
index defe808..4afc857 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
@@ -95,7 +95,6 @@ public class ServiceComponentImpl implements ServiceComponent {
     desiredStateEntity.setDesiredState(State.INIT);
     desiredStateEntity.setServiceName(service.getName());
     desiredStateEntity.setClusterId(service.getClusterId());
-    desiredStateEntity.setRecoveryEnabled(false);
 
     setDesiredStackVersion(service.getDesiredStackVersion());
 
@@ -182,55 +181,6 @@ public class ServiceComponentImpl implements ServiceComponent {
     return componentName;
   }
 
-  /**
-   * Get the recoveryEnabled value.
-   *
-   * @return true or false
-   */
-  @Override
-  public boolean isRecoveryEnabled() {
-    ServiceComponentDesiredStateEntity desiredStateEntity = getDesiredStateEntity();
-    if (desiredStateEntity != null) {
-      return desiredStateEntity.isRecoveryEnabled();
-    } else {
-      LOG.warn("Trying to fetch a member from an entity object that may " +
-              "have been previously deleted, serviceName = " + service.getName() + ", " +
-              "componentName = " + componentName);
-    }
-    return false;
-  }
-
-  /**
-   * Set the recoveryEnabled field in the entity object.
-   *
-   * @param recoveryEnabled - true or false
-   */
-  @Override
-  public void setRecoveryEnabled(boolean recoveryEnabled) {
-    readWriteLock.writeLock().lock();
-    try {
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("Setting RecoveryEnabled of Component" + ", clusterName="
-                + service.getCluster().getClusterName() + ", clusterId="
-                + service.getCluster().getClusterId() + ", serviceName="
-                + service.getName() + ", componentName=" + getName()
-                + ", oldRecoveryEnabled=" + isRecoveryEnabled() + ", newRecoveryEnabled="
-                + recoveryEnabled);
-      }
-      ServiceComponentDesiredStateEntity desiredStateEntity = getDesiredStateEntity();
-      if (desiredStateEntity != null) {
-        desiredStateEntity.setRecoveryEnabled(recoveryEnabled);
-        saveIfPersisted(desiredStateEntity);
-      } else {
-        LOG.warn("Setting a member on an entity object that may have been " +
-                "previously deleted, serviceName = " + service.getName());
-      }
-
-    } finally {
-      readWriteLock.writeLock().unlock();
-    }
-  }
-
   @Override
   public String getServiceName() {
     return service.getName();
@@ -297,8 +247,7 @@ public class ServiceComponentImpl implements ServiceComponent {
               + ", clusterId=" + service.getCluster().getClusterId()
               + ", serviceName=" + service.getName()
               + ", serviceComponentName=" + getName()
-              + ", hostname=" + hostComponent.getHostName()
-              + ", recoveryEnabled=" + isRecoveryEnabled());
+              + ", hostname=" + hostComponent.getHostName());
         }
         if (hostComponents.containsKey(hostComponent.getHostName())) {
           throw new AmbariException("Cannot add duplicate ServiceComponentHost"
@@ -306,8 +255,7 @@ public class ServiceComponentImpl implements ServiceComponent {
               + ", clusterId=" + service.getCluster().getClusterId()
               + ", serviceName=" + service.getName()
               + ", serviceComponentName=" + getName()
-              + ", hostname=" + hostComponent.getHostName()
-              + ", recoveryEnabled=" + isRecoveryEnabled());
+              + ", hostname=" + hostComponent.getHostName());
         }
         // FIXME need a better approach of caching components by host
         ClusterImpl clusterImpl = (ClusterImpl) service.getCluster();
@@ -335,7 +283,6 @@ public class ServiceComponentImpl implements ServiceComponent {
               + ", clusterId=" + service.getCluster().getClusterId()
               + ", serviceName=" + service.getName()
               + ", serviceComponentName=" + getName()
-              + ", recoveryEnabled=" + isRecoveryEnabled()
               + ", hostname=" + hostName);
         }
         if (hostComponents.containsKey(hostName)) {
@@ -344,7 +291,6 @@ public class ServiceComponentImpl implements ServiceComponent {
               + ", clusterId=" + service.getCluster().getClusterId()
               + ", serviceName=" + service.getName()
               + ", serviceComponentName=" + getName()
-              + ", recoveryEnabled=" + isRecoveryEnabled()
               + ", hostname=" + hostName);
         }
         ServiceComponentHost hostComponent = serviceComponentHostFactory.createNew(this, hostName);
@@ -408,11 +354,11 @@ public class ServiceComponentImpl implements ServiceComponent {
     try {
       if (LOG.isDebugEnabled()) {
         LOG.debug("Setting DesiredState of Service" + ", clusterName="
-                + service.getCluster().getClusterName() + ", clusterId="
-                + service.getCluster().getClusterId() + ", serviceName="
-                + service.getName() + ", serviceComponentName=" + getName()
-                + ", oldDesiredState=" + getDesiredState() + ", newDesiredState="
-                + state);
+            + service.getCluster().getClusterName() + ", clusterId="
+            + service.getCluster().getClusterId() + ", serviceName="
+            + service.getName() + ", serviceComponentName=" + getName()
+            + ", oldDesiredState=" + getDesiredState() + ", newDesiredState="
+            + state);
       }
       ServiceComponentDesiredStateEntity desiredStateEntity = getDesiredStateEntity();
       if (desiredStateEntity != null) {
@@ -482,8 +428,7 @@ public class ServiceComponentImpl implements ServiceComponent {
       ServiceComponentResponse r = new ServiceComponentResponse(getClusterId(),
           cluster.getClusterName(), service.getName(), getName(),
           getDesiredStackVersion().getStackId(), getDesiredState().toString(),
-          getTotalCount(), getStartedCount(), getInstalledCount(),
-          isRecoveryEnabled());
+          getTotalCount(), getStartedCount(), getInstalledCount());
       return r;
     } finally {
       readWriteLock.readLock().unlock();
@@ -495,13 +440,11 @@ public class ServiceComponentImpl implements ServiceComponent {
     return service.getCluster().getClusterName();
   }
 
-
   @Override
   public void debugDump(StringBuilder sb) {
     readWriteLock.readLock().lock();
     try {
       sb.append("ServiceComponent={ serviceComponentName=" + getName()
-          + ", recoveryEnabled=" + isRecoveryEnabled()
           + ", clusterName=" + service.getCluster().getClusterName()
           + ", clusterId=" + service.getCluster().getClusterId()
           + ", serviceName=" + service.getName() + ", desiredStackVersion="
@@ -649,7 +592,6 @@ public class ServiceComponentImpl implements ServiceComponent {
                 + ", clusterName=" + getClusterName()
                 + ", serviceName=" + getServiceName()
                 + ", componentName=" + getName()
-                + ", recoveryEnabled=" + isRecoveryEnabled()
                 + ", hostname=" + sch.getHostName());
             return false;
           }
@@ -673,8 +615,7 @@ public class ServiceComponentImpl implements ServiceComponent {
         LOG.info("Deleting all servicecomponenthosts for component"
             + ", clusterName=" + getClusterName()
             + ", serviceName=" + getServiceName()
-            + ", componentName=" + getName()
-            + ", recoveryEnabled=" + isRecoveryEnabled());
+            + ", componentName=" + getName());
         for (ServiceComponentHost sch : hostComponents.values()) {
           if (!sch.canBeRemoved()) {
             throw new AmbariException("Found non removable hostcomponent "
@@ -683,7 +624,6 @@ public class ServiceComponentImpl implements ServiceComponent {
                 + ", clusterName=" + getClusterName()
                 + ", serviceName=" + getServiceName()
                 + ", componentName=" + getName()
-                + ", recoveryEnabled=" + isRecoveryEnabled()
                 + ", hostname=" + sch.getHostName());
           }
         }
@@ -712,14 +652,12 @@ public class ServiceComponentImpl implements ServiceComponent {
             + ", clusterName=" + getClusterName()
             + ", serviceName=" + getServiceName()
             + ", componentName=" + getName()
-            + ", recoveryEnabled=" + isRecoveryEnabled()
             + ", hostname=" + sch.getHostName());
         if (!sch.canBeRemoved()) {
           throw new AmbariException("Could not delete hostcomponent from cluster"
               + ", clusterName=" + getClusterName()
               + ", serviceName=" + getServiceName()
               + ", componentName=" + getName()
-              + ", recoveryEnabled=" + isRecoveryEnabled()
               + ", hostname=" + sch.getHostName());
         }
         sch.delete();

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 07addfc..8230fe3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -2474,31 +2474,13 @@ public class ClusterImpl implements Cluster {
     clusterGlobalLock.readLock().lock();
     try {
       List<ServiceConfigVersionResponse> serviceConfigVersionResponses = new ArrayList<ServiceConfigVersionResponse>();
+      Set<Long> activeIds = getActiveServiceConfigVersionIds();
 
-      List<ServiceConfigEntity> serviceConfigs = serviceConfigDAO.getServiceConfigs(getClusterId());
-      Map<String, ServiceConfigVersionResponse> activeServiceConfigResponses = new HashMap<>();
-
-      for (ServiceConfigEntity serviceConfigEntity : serviceConfigs) {
+      for (ServiceConfigEntity serviceConfigEntity : serviceConfigDAO.getServiceConfigs(getClusterId())) {
         ServiceConfigVersionResponse serviceConfigVersionResponse = convertToServiceConfigVersionResponse(serviceConfigEntity);
 
-        ServiceConfigVersionResponse activeServiceConfigResponse = activeServiceConfigResponses.get(serviceConfigVersionResponse.getServiceName());
-        if (activeServiceConfigResponse == null) {
-          activeServiceConfigResponse = serviceConfigVersionResponse;
-          activeServiceConfigResponses.put(serviceConfigVersionResponse.getServiceName(), serviceConfigVersionResponse);
-        }
-
         serviceConfigVersionResponse.setConfigurations(new ArrayList<ConfigurationResponse>());
-
-        if (serviceConfigEntity.getGroupId() == null) {
-          if (serviceConfigVersionResponse.getCreateTime() > activeServiceConfigResponse.getCreateTime())
-            activeServiceConfigResponses.put(serviceConfigVersionResponse.getServiceName(), serviceConfigVersionResponse);
-        }
-        else if (clusterConfigGroups != null && clusterConfigGroups.containsKey(serviceConfigEntity.getGroupId())){
-          if (serviceConfigVersionResponse.getVersion() > activeServiceConfigResponse.getVersion())
-            activeServiceConfigResponses.put(serviceConfigVersionResponse.getServiceName(), serviceConfigVersionResponse);
-        }
-
-        serviceConfigVersionResponse.setIsCurrent(false);
+        serviceConfigVersionResponse.setIsCurrent(activeIds.contains(serviceConfigEntity.getServiceConfigId()));
 
         List<ClusterConfigEntity> clusterConfigEntities = serviceConfigEntity.getClusterConfigEntities();
         for (ClusterConfigEntity clusterConfigEntity : clusterConfigEntities) {
@@ -2514,10 +2496,6 @@ public class ClusterImpl implements Cluster {
         serviceConfigVersionResponses.add(serviceConfigVersionResponse);
       }
 
-      for (ServiceConfigVersionResponse serviceConfigVersionResponse: activeServiceConfigResponses.values()) {
-        serviceConfigVersionResponse.setIsCurrent(true);
-      }
-
       return serviceConfigVersionResponses;
     } finally {
       clusterGlobalLock.readLock().unlock();
@@ -2536,6 +2514,14 @@ public class ClusterImpl implements Cluster {
     return responses;
   }
 
+  private Set<Long> getActiveServiceConfigVersionIds() {
+    Set<Long> idSet = new HashSet<Long>();
+    for (ServiceConfigEntity entity : getActiveServiceConfigVersionEntities()) {
+      idSet.add(entity.getServiceConfigId());
+    }
+    return idSet;
+  }
+
   private List<ServiceConfigEntity> getActiveServiceConfigVersionEntities() {
 
     List<ServiceConfigEntity> activeServiceConfigVersions = new ArrayList<ServiceConfigEntity>();
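
The replacement above boils down to a set-membership check: collect the ids of the currently active service config versions once, then flag each response by looking its id up in that set, instead of re-deriving the "newest response per service" inside the loop. A minimal, self-contained sketch of that pattern follows; the Row class and the sample ids are invented for illustration and are not Ambari code.

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class ActiveVersionFlagSketch {

      // Stand-in for a persisted service config version row.
      static class Row {
        final long id;
        final String service;
        Row(long id, String service) { this.id = id; this.service = service; }
      }

      // Collect the ids of the rows that are currently active.
      static Set<Long> activeIds(List<Row> activeRows) {
        Set<Long> ids = new HashSet<Long>();
        for (Row r : activeRows) {
          ids.add(r.id);
        }
        return ids;
      }

      public static void main(String[] args) {
        List<Row> all = Arrays.asList(new Row(1, "HDFS"), new Row(2, "HDFS"), new Row(3, "YARN"));
        // Pretend rows 2 and 3 are the active versions for their services.
        Set<Long> active = activeIds(Arrays.asList(all.get(1), all.get(2)));

        for (Row r : all) {
          // One O(1) membership test replaces the per-service "newest so far" bookkeeping.
          boolean isCurrent = active.contains(r.id);
          System.out.println(r.service + " v" + r.id + " current=" + isCurrent);
        }
      }
    }

The practical effect of the hunk is that the "which version is current" decision now lives in one place (the active-entities query) rather than being recomputed while the responses are assembled.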

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/state/services/AlertNoticeDispatchService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/services/AlertNoticeDispatchService.java b/ambari-server/src/main/java/org/apache/ambari/server/state/services/AlertNoticeDispatchService.java
index 0b84568..a27bc1d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/services/AlertNoticeDispatchService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/services/AlertNoticeDispatchService.java
@@ -451,7 +451,7 @@ public class AlertNoticeDispatchService extends AbstractScheduledService {
     String targetType = target.getNotificationType();
 
     // build the velocity objects for template rendering
-    AmbariInfo ambari = new AmbariInfo(m_metaInfo.get(), m_configuration);
+    AmbariInfo ambari = new AmbariInfo(m_metaInfo.get());
     AlertSummaryInfo summary = new AlertSummaryInfo(histories);
     DispatchInfo dispatch = new DispatchInfo(target);
 
@@ -516,7 +516,7 @@ public class AlertNoticeDispatchService extends AbstractScheduledService {
     String targetType = target.getNotificationType();
 
     // build the velocity objects for template rendering
-    AmbariInfo ambari = new AmbariInfo(m_metaInfo.get(), m_configuration);
+    AmbariInfo ambari = new AmbariInfo(m_metaInfo.get());
     AlertInfo alert = new AlertInfo(history);
     DispatchInfo dispatch = new DispatchInfo(target);
 
@@ -558,10 +558,6 @@ public class AlertNoticeDispatchService extends AbstractScheduledService {
         bodyWriter.write(alert.getAlertName());
         bodyWriter.write(" ");
         bodyWriter.write(alert.getAlertText());
-        if (alert.hasHostName()) {
-          bodyWriter.write(" ");
-          bodyWriter.append(alert.getHostName());
-        }
         bodyWriter.write("\n");
       }
     }
@@ -1046,8 +1042,7 @@ public class AlertNoticeDispatchService extends AbstractScheduledService {
      *
      * @param metaInfo
      */
-    protected AmbariInfo(AmbariMetaInfo metaInfo, Configuration m_configuration) {
-      m_url = m_configuration.getAmbariDisplayUrl();
+    protected AmbariInfo(AmbariMetaInfo metaInfo) {
       m_version = metaInfo.getServerVersion();
     }
 
@@ -1058,10 +1053,6 @@ public class AlertNoticeDispatchService extends AbstractScheduledService {
       return m_hostName;
     }
 
-    public boolean hasUrl() {
-      return m_url != null;
-    }
-
     /**
      * @return the url
      */
@@ -1202,4 +1193,4 @@ public class AlertNoticeDispatchService extends AbstractScheduledService {
       return m_body;
     }
   }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
index 0aa1e7a..88b3151 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
@@ -115,7 +115,6 @@ public class UpgradeCatalog222 extends AbstractUpgradeCatalog {
     updateAlerts();
     updateStormConfigs();
     updateAMSConfigs();
-    updateHostRoleCommands();
   }
 
   protected void updateStormConfigs() throws  AmbariException {
@@ -154,10 +153,6 @@ public class UpgradeCatalog222 extends AbstractUpgradeCatalog {
 
   }
 
-  protected void updateHostRoleCommands() throws SQLException{
-    dbAccessor.createIndex("idx_hrc_status", "host_role_command", "status", "role");
-  }
-
   protected void updateAMSConfigs() throws AmbariException {
     AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
     Clusters clusters = ambariManagementController.getClusters();

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
index 4e99c89..09f31e4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
@@ -32,7 +32,6 @@ import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.controller.AmbariManagementController;
-import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
 import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
 import org.apache.ambari.server.orm.dao.DaoUtils;
@@ -90,8 +89,6 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   private static final String ID = "id";
   private static final String SETTING_TABLE = "setting";
 
-  protected static final String SERVICE_COMPONENT_DESIRED_STATE_TABLE = "servicecomponentdesiredstate";
-  protected static final String RECOVERY_ENABLED_COL = "recovery_enabled";
 
   // ----- Constructors ------------------------------------------------------
 
@@ -130,7 +127,6 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   @Override
   protected void executeDDLUpdates() throws AmbariException, SQLException {
     updateAdminPermissionTable();
-    updateServiceComponentDesiredStateTable();
     createSettingTable();
     updateRepoVersionTableDDL();
     updateServiceComponentDesiredStateTableDDL();
@@ -566,14 +562,4 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
 
     addSequence("servicecomponent_history_id_seq", 0L, false);
   }
-  /**
-   * Alter servicecomponentdesiredstate table to add recovery_enabled column.
-   * @throws SQLException
-   */
-  private void updateServiceComponentDesiredStateTable() throws SQLException {
-    // ALTER TABLE servicecomponentdesiredstate ADD COLUMN
-    // recovery_enabled SMALLINT DEFAULT 0 NOT NULL
-    dbAccessor.addColumn(SERVICE_COMPONENT_DESIRED_STATE_TABLE,
-            new DBAccessor.DBColumnInfo(RECOVERY_ENABLED_COL, Short.class, null, 0, false));
-  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
index 73cf84e..2db745b 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
@@ -177,10 +177,8 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
-  recovery_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
-  PRIMARY KEY (component_name, cluster_id, service_name)
 );
 
 CREATE TABLE servicedesiredstate (

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 9353ac2..b892bc8 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -178,10 +178,8 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(100) NOT NULL,
-  recovery_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
-  PRIMARY KEY (component_name, cluster_id, service_name)
 );
 
 CREATE TABLE servicedesiredstate (
@@ -685,7 +683,6 @@ CREATE TABLE setting (
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
-CREATE INDEX idx_hrc_status_role ON host_role_command (status, role);
 CREATE INDEX idx_rsc_request_id ON role_success_criteria (request_id);
 
 -- altering tables by creating unique constraints----------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 5f39b44..026efea 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -168,10 +168,8 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id NUMBER(19) NOT NULL,
   desired_state VARCHAR2(255) NOT NULL,
   service_name VARCHAR2(255) NOT NULL,
-  recovery_enabled SMALLINT DEFAULT 0 NOT NULL,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
-  PRIMARY KEY (component_name, cluster_id, service_name)
 );
 
 CREATE TABLE servicedesiredstate (
@@ -674,7 +672,6 @@ CREATE TABLE setting (
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
-CREATE INDEX idx_hrc_status_role ON host_role_command (status, role);
 CREATE INDEX idx_rsc_request_id ON role_success_criteria (request_id);
 
 --------altering tables by creating unique constraints----------

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index 4a8fa2a..fb9889d 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -177,10 +177,8 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
-  recovery_enabled SMALLINT NOT NULL DEFAULT 0,
   CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
   CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
-  PRIMARY KEY (component_name, cluster_id, service_name)
 );
 
 CREATE TABLE servicedesiredstate (
@@ -678,11 +676,8 @@ CREATE TABLE setting (
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
-CREATE INDEX idx_hrc_status_role ON host_role_command (status, role);
 CREATE INDEX idx_rsc_request_id ON role_success_criteria (request_id);
 
-
-
 --------altering tables by creating unique constraints----------
 ALTER TABLE users ADD CONSTRAINT UNQ_users_0 UNIQUE (user_name, user_type);
 ALTER TABLE clusterconfig ADD CONSTRAINT UQ_config_type_tag UNIQUE (cluster_id, type_name, version_tag);


[21/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d3db8dad
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d3db8dad
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d3db8dad

Branch: refs/heads/trunk
Commit: d3db8daddcdae75f5ccff2760b7bd646e7d63bce
Parents: c11a411 f60472d
Author: Nate Cole <nc...@hortonworks.com>
Authored: Mon Feb 8 09:03:13 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Mon Feb 8 09:03:13 2016 -0500

----------------------------------------------------------------------
 ambari-agent/conf/unix/ambari-agent             |   8 +-
 ambari-agent/conf/unix/install-helper.sh        |  26 +-
 ambari-agent/src/packages/tarball/all.xml       |   5 -
 .../src/main/python/ambari_commons/os_check.py  |  27 +-
 .../ambari_commons/resources/os_family.json     |  10 +-
 .../core/providers/__init__.py                  |   3 +
 .../libraries/providers/__init__.py             |   3 +
 ambari-common/src/main/unix/ambari-python-wrap  |   8 +-
 .../HDP/2.0.1/services/GANGLIA/metainfo.xml     |   4 +-
 .../stacks/HDP/2.0.1/services/HDFS/metainfo.xml |   2 +-
 .../stacks/HDP/2.0.1/services/HIVE/metainfo.xml |   4 +-
 .../HDP/2.0.1/services/OOZIE/metainfo.xml       |   2 +-
 .../HDP/2.0.5/services/GANGLIA/metainfo.xml     |   4 +-
 .../stacks/HDP/2.0.5/services/HDFS/metainfo.xml |   2 +-
 .../stacks/HDP/2.0.5/services/HIVE/metainfo.xml |   4 +-
 .../HDP/2.0.5/services/OOZIE/metainfo.xml       |   2 +-
 .../stacks/HDP/2.0.7/services/HIVE/metainfo.xml |   4 +-
 .../2.1.1/services/AMBARI_METRICS/metainfo.xml  |   2 +-
 ambari-server/conf/unix/ambari-env.sh           |   1 +
 ambari-server/conf/unix/install-helper.sh       |  24 +-
 ambari-server/sbin/ambari-server                |   7 +-
 ambari-server/src/main/assemblies/server.xml    |   5 -
 .../src/main/python/ambari_server/utils.py      |   5 +-
 .../0.1.0/configuration/ams-grafana-ini.xml     | 542 ++++++++++---------
 .../AMBARI_METRICS/0.1.0/metainfo.xml           |   2 +-
 .../0.1.0/package/scripts/params.py             |   5 +
 .../common-services/GANGLIA/3.5.0/metainfo.xml  |   4 +-
 .../HAWQ/2.0.0/configuration/hawq-site.xml      |  12 +
 .../HAWQ/2.0.0/package/scripts/utils.py         |   6 +-
 .../common-services/HDFS/2.1.0.2.0/metainfo.xml |   2 +-
 .../HIVE/0.12.0.2.0/metainfo.xml                |   4 +-
 .../KERBEROS/1.10.3-10/metainfo.xml             |   2 +-
 .../OOZIE/4.0.0.2.0/metainfo.xml                |   2 +-
 .../stacks/HDP/2.1/services/HIVE/metainfo.xml   |   4 +-
 .../stacks/HDP/2.1/services/OOZIE/metainfo.xml  |   2 +-
 .../resources/stacks/HDP/2.2/repos/repoinfo.xml |  12 +
 .../stacks/HDP/2.2/services/FALCON/metainfo.xml |   2 +-
 .../stacks/HDP/2.2/services/FLUME/metainfo.xml  |   2 +-
 .../stacks/HDP/2.2/services/HBASE/metainfo.xml  |   2 +-
 .../stacks/HDP/2.2/services/HDFS/metainfo.xml   |   2 +-
 .../stacks/HDP/2.2/services/HIVE/metainfo.xml   |   6 +-
 .../stacks/HDP/2.2/services/KAFKA/metainfo.xml  |   2 +-
 .../stacks/HDP/2.2/services/KNOX/metainfo.xml   |   2 +-
 .../stacks/HDP/2.2/services/OOZIE/metainfo.xml  |   2 +-
 .../stacks/HDP/2.2/services/PIG/metainfo.xml    |   2 +-
 .../stacks/HDP/2.2/services/RANGER/metainfo.xml |   2 +-
 .../stacks/HDP/2.2/services/SLIDER/metainfo.xml |   2 +-
 .../stacks/HDP/2.2/services/SPARK/metainfo.xml  |   2 +-
 .../stacks/HDP/2.2/services/SQOOP/metainfo.xml  |   2 +-
 .../stacks/HDP/2.2/services/STORM/metainfo.xml  |   2 +-
 .../stacks/HDP/2.2/services/TEZ/metainfo.xml    |   2 +-
 .../stacks/HDP/2.2/services/YARN/metainfo.xml   |   4 +-
 .../HDP/2.3.ECS/services/ZOOKEEPER/metainfo.xml |   2 +-
 .../resources/stacks/HDP/2.3/repos/repoinfo.xml |  12 +
 .../HDP/2.3/services/ACCUMULO/metainfo.xml      |   2 +-
 .../stacks/HDP/2.3/services/ATLAS/metainfo.xml  |   2 +-
 .../stacks/HDP/2.3/services/HDFS/metainfo.xml   |   2 +-
 .../stacks/HDP/2.3/services/HIVE/metainfo.xml   |   6 +-
 .../stacks/HDP/2.3/services/KAFKA/widgets.json  | 182 +++++++
 .../stacks/HDP/2.3/services/OOZIE/metainfo.xml  |   2 +-
 .../stacks/HDP/2.3/services/PIG/metainfo.xml    |   2 +-
 .../HDP/2.3/services/RANGER_KMS/metainfo.xml    |   2 +-
 .../stacks/HDP/2.3/services/STORM/widgets.json  | 127 +++++
 .../HDP/2.3/services/ZOOKEEPER/metainfo.xml     |   2 +-
 .../stacks/HDP/2.3/services/stack_advisor.py    |  37 +-
 .../resources/stacks/HDP/2.4/repos/repoinfo.xml |  12 +
 .../stacks/2.3/common/test_stack_advisor.py     |  51 +-
 ambari-web/app/assets/test/tests.js             |   1 -
 .../hawq/addStandby/step3_controller.js         |  23 +-
 .../main/host/bulk_operations_controller.js     |  25 +-
 ambari-web/app/controllers/main/host/details.js |  44 +-
 ambari-web/app/controllers/main/service/item.js |   7 +-
 ambari-web/app/data/service_graph_config.js     |  15 -
 ambari-web/app/messages.js                      |  30 +-
 .../mixins/common/chart/storm_linear_time.js    |  74 ---
 .../app/mixins/common/widgets/widget_mixin.js   |   2 +-
 .../configs/add_component_config_initializer.js |  13 +-
 ambari-web/app/views.js                         |   9 -
 .../app/views/common/chart/linear_time.js       |  20 -
 .../service/info/metrics/kafka/broker_topic.js  |  50 --
 .../info/metrics/kafka/controller_status.js     |  49 --
 .../info/metrics/kafka/kafka_controller.js      |  41 --
 .../info/metrics/kafka/replica_fetcher.js       |  41 --
 .../info/metrics/kafka/replica_manager.js       |  46 --
 .../info/metrics/storm/executors_metric.js      |  34 --
 .../info/metrics/storm/slots_number_metric.js   |  42 --
 .../service/info/metrics/storm/tasks_metric.js  |  34 --
 .../info/metrics/storm/topologies_metric.js     |  34 --
 .../test/controllers/main/host/details_test.js  |  97 +++-
 .../common/chart/storm_linear_time_test.js      |  80 ---
 .../resources/ui/scripts/init-ambari-view.js    |   5 +-
 docs/src/site/apt/index.apt                     |   2 +-
 docs/src/site/apt/whats-new.apt                 |  24 +-
 docs/src/site/site.xml                          |   2 +
 94 files changed, 1052 insertions(+), 1034 deletions(-)
----------------------------------------------------------------------



[04/50] [abbrv] ambari git commit: AMBARI-14743. Update package installation for version definition file (ncole)

Posted by nc...@apache.org.
AMBARI-14743. Update package installation for version definition file (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fc402ebc
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fc402ebc
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fc402ebc

Branch: refs/heads/trunk
Commit: fc402ebc50929f30224481d605325157cb302ec9
Parents: f31a06d
Author: Nate Cole <nc...@hortonworks.com>
Authored: Wed Jan 20 11:19:59 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Wed Jan 20 12:04:15 2016 -0500

----------------------------------------------------------------------
 .../ClusterStackVersionResourceProvider.java    |  28 ++-
 .../RepositoryVersionResourceProvider.java      |  17 +-
 .../orm/entities/RepositoryVersionEntity.java   |  15 ++
 .../state/repository/AvailableService.java      |   7 +
 ...ClusterStackVersionResourceProviderTest.java | 238 ++++++++++++++++++-
 .../src/test/resources/hbase_version_test.xml   |  59 +++++
 6 files changed, 348 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/fc402ebc/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
index b114ea5..ce5606e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
@@ -73,11 +73,14 @@ import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ComponentInfo;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.MaintenanceState;
+import org.apache.ambari.server.state.RepositoryType;
 import org.apache.ambari.server.state.RepositoryVersionState;
 import org.apache.ambari.server.state.ServiceComponentHost;
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.ServiceOsSpecific;
 import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.repository.AvailableService;
+import org.apache.ambari.server.state.repository.VersionDefinitionXml;
 import org.apache.ambari.server.utils.StageUtils;
 import org.apache.commons.lang.StringUtils;
 
@@ -415,9 +418,27 @@ public class ClusterStackVersionResourceProvider extends AbstractControllerResou
 
       // determine services for the repo
       Set<String> serviceNames = new HashSet<>();
-//      for (RepositoryVersionEntity.Component component : repoVersionEnt.getComponents()) {
-//        serviceNames.add(component.getService());
-//      }
+      if (RepositoryType.STANDARD != repoVersionEnt.getType() && null != repoVersionEnt.getVersionXsd()) {
+        VersionDefinitionXml xml = null;
+        try {
+         xml = repoVersionEnt.getRepositoryXml();
+
+         Collection<AvailableService> available = xml.getAvailableServices(
+             ami.getStack(stackId.getStackName(), stackId.getStackVersion()));
+
+         // check if the service is part of the cluster
+         for (AvailableService as : available) {
+           if (cluster.getServices().containsKey(as.getName())) {
+             serviceNames.add(as.getName());
+           }
+         }
+
+        } catch (Exception e) {
+          String msg = String.format("Could not load repo xml for %s", repoVersionEnt.getDisplayName());
+          LOG.error(msg, e);
+          throw new SystemException (msg);
+        }
+      }
 
       // Populate with commands for host
       for (int i = 0; i < maxTasks && hostIterator.hasNext(); i++) {
@@ -573,6 +594,7 @@ public class ClusterStackVersionResourceProvider extends AbstractControllerResou
    */
   private boolean hostHasVersionableComponents(Cluster cluster, Set<String> serviceNames, AmbariMetaInfo ami, StackId stackId,
       Host host) throws SystemException {
+
     List<ServiceComponentHost> components = cluster.getServiceComponentHosts(host.getHostName());
 
     for (ServiceComponentHost component : components) {
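
For non-STANDARD repositories the hunk above parses the stored version definition and then keeps only the services that are both advertised by the repository and actually installed in the cluster. A small stand-alone sketch of that intersection step; the service names and the servicesToInstall helper are invented for the example and are not Ambari classes.

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    public class PatchServiceFilterSketch {

      // Keep only the services named by the repository that the cluster actually has.
      static Set<String> servicesToInstall(List<String> availableInRepo, Map<String, Object> installed) {
        Set<String> result = new HashSet<String>();
        for (String name : availableInRepo) {
          if (installed.containsKey(name)) {
            result.add(name);
          }
        }
        return result;
      }

      public static void main(String[] args) {
        Map<String, Object> installed = new HashMap<String, Object>();
        installed.put("HDFS", new Object());
        installed.put("HBASE", new Object());

        // A PATCH repo advertising HBASE and STORM; only HBASE is in the cluster.
        Set<String> names = servicesToInstall(Arrays.asList("HBASE", "STORM"), installed);
        System.out.println(names); // [HBASE]
      }
    }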

http://git-wip-us.apache.org/repos/asf/ambari/blob/fc402ebc/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
index 8b34cc8..92b14b7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
@@ -263,15 +263,16 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
       setResourceProperty(resource, REPOSITORY_VERSION_REPOSITORY_VERSION_PROPERTY_ID, entity.getVersion(), requestedIds);
       setResourceProperty(resource, REPOSITORY_VERSION_TYPE_PROPERTY_ID, entity.getType(), requestedIds);
 
-      if (null != entity.getVersionXsd()) {
-        final VersionDefinitionXml xml;
-        final StackInfo stack;
+      final VersionDefinitionXml xml;
 
-        try {
-          xml = VersionDefinitionXml.load(entity.getVersionXml());
-        } catch (Exception e) {
-          throw new SystemException(String.format("Could not load xml for Repository %s", entity.getId()), e);
-        }
+      try {
+        xml = entity.getRepositoryXml();
+      } catch (Exception e) {
+        throw new SystemException(String.format("Could not load xml for Repository %s", entity.getId()), e);
+      }
+
+      if (null != xml) {
+        final StackInfo stack;
 
         try {
           stack = ambariMetaInfo.getStack(entity.getStackName(), entity.getStackVersion());

http://git-wip-us.apache.org/repos/asf/ambari/blob/fc402ebc/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
index a31b135..e2e455b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
@@ -44,6 +44,7 @@ import javax.persistence.UniqueConstraint;
 import org.apache.ambari.server.StaticallyInject;
 import org.apache.ambari.server.state.RepositoryType;
 import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.repository.VersionDefinitionXml;
 import org.apache.ambari.server.state.stack.upgrade.RepositoryVersionHelper;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
@@ -322,6 +323,20 @@ public class RepositoryVersionEntity {
     versionXsd = xsdLocation;
   }
 
+  /**
+   * Parse the version XML into its object representation.  This causes the XML to be lazy-loaded
+   * from storage.
+   * @return {@code null} if the XSD is not available.
+   * @throws Exception
+   */
+  public VersionDefinitionXml getRepositoryXml() throws Exception {
+    if (null == versionXsd) {
+      return null;
+    }
+
+    return VersionDefinitionXml.load(getVersionXml());
+  }
+
   @Override
   public int hashCode() {
     int result = id != null ? id.hashCode() : 0;
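
getRepositoryXml() above defers parsing until a caller needs the object form and returns null when no definition was stored. A rough stand-alone illustration of that lazy-parse contract using plain JAXB (available as javax.xml.bind on Java 8); the Release type and its field are invented for the example, and Ambari's actual VersionDefinitionXml.load is not shown in this diff.

    import java.io.StringReader;
    import javax.xml.bind.JAXBContext;
    import javax.xml.bind.annotation.XmlElement;
    import javax.xml.bind.annotation.XmlRootElement;

    public class LazyXmlParseSketch {

      @XmlRootElement(name = "release")
      public static class Release {
        @XmlElement public String version;
      }

      private final String versionXml;  // raw XML kept on the entity
      private final String versionXsd;  // schema marker; null means "no structured metadata"

      LazyXmlParseSketch(String xml, String xsd) { versionXml = xml; versionXsd = xsd; }

      // Parse on demand; callers get null when nothing was stored.
      Release getReleaseXml() throws Exception {
        if (versionXsd == null) {
          return null;
        }
        JAXBContext ctx = JAXBContext.newInstance(Release.class);
        return (Release) ctx.createUnmarshaller().unmarshal(new StringReader(versionXml));
      }

      public static void main(String[] args) throws Exception {
        LazyXmlParseSketch entity = new LazyXmlParseSketch(
            "<release><version>2.3.4.0</version></release>", "version_definition.xsd");
        System.out.println(entity.getReleaseXml().version); // 2.3.4.0
      }
    }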

http://git-wip-us.apache.org/repos/asf/ambari/blob/fc402ebc/ambari-server/src/main/java/org/apache/ambari/server/state/repository/AvailableService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/repository/AvailableService.java b/ambari-server/src/main/java/org/apache/ambari/server/state/repository/AvailableService.java
index ce42b69..70f3c40 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/repository/AvailableService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/repository/AvailableService.java
@@ -45,6 +45,13 @@ public class AvailableService {
   }
 
   /**
+   * @return the service name
+   */
+  public String getName() {
+    return name;
+  }
+
+  /**
    * @return the list of versions to append additional versions.
    */
   public List<AvailableVersion> getVersions() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/fc402ebc/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
index 5c21433..c6d0c57 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
@@ -29,8 +29,11 @@ import static org.easymock.EasyMock.expectLastCall;
 import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.verify;
 
+import java.io.File;
+import java.io.FileInputStream;
 import java.lang.reflect.Field;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
@@ -51,6 +54,7 @@ import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.ExecuteActionRequest;
 import org.apache.ambari.server.controller.RequestStatusResponse;
 import org.apache.ambari.server.controller.ResourceProviderFactory;
 import org.apache.ambari.server.controller.spi.Request;
@@ -80,8 +84,10 @@ import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.MaintenanceState;
+import org.apache.ambari.server.state.RepositoryType;
 import org.apache.ambari.server.state.RepositoryVersionState;
 import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceComponentHost;
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.ServiceOsSpecific;
@@ -89,6 +95,7 @@ import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.cluster.ClusterImpl;
 import org.apache.ambari.server.topology.TopologyManager;
 import org.apache.ambari.server.utils.StageUtils;
+import org.apache.commons.io.IOUtils;
 import org.easymock.Capture;
 import org.easymock.EasyMock;
 import org.easymock.IAnswer;
@@ -123,7 +130,7 @@ public class ClusterStackVersionResourceProviderTest {
   private HostVersionDAO hostVersionDAO;
   private HostComponentStateDAO hostComponentStateDAO;
 
-  private String operatingSystemsJson = "[\n" +
+  private static final String OS_JSON = "[\n" +
           "   {\n" +
           "      \"repositories\":[\n" +
           "         {\n" +
@@ -141,7 +148,6 @@ public class ClusterStackVersionResourceProviderTest {
           "   }\n" +
           "]";
 
-
   @Before
   public void setup() throws Exception {
     // Create instances of mocks
@@ -182,7 +188,7 @@ public class ClusterStackVersionResourceProviderTest {
 
     RepositoryVersionEntity repoVersion = new RepositoryVersionEntity();
     repoVersion.setId(1l);
-    repoVersion.setOperatingSystems(operatingSystemsJson);
+    repoVersion.setOperatingSystems(OS_JSON);
 
     Map<String, Host> hostsForCluster = new HashMap<String, Host>();
     int hostCount = 10;
@@ -345,6 +351,228 @@ public class ClusterStackVersionResourceProviderTest {
     // check that the success factor was populated in the stage
     Float successFactor = successFactors.get(Role.INSTALL_PACKAGES);
     Assert.assertEquals(Float.valueOf(0.85f), successFactor);
+
+
+  }
+
+  @Test
+  public void testCreateResourcesForPatch() throws Exception {
+    Resource.Type type = Resource.Type.ClusterStackVersion;
+
+    AmbariManagementController managementController = createMock(AmbariManagementController.class);
+    Clusters clusters = createNiceMock(Clusters.class);
+    Cluster cluster = createNiceMock(Cluster.class);
+    StackId stackId = new StackId("HDP", "2.0.1");
+
+    File f = new File("src/test/resources/hbase_version_test.xml");
+
+    RepositoryVersionEntity repoVersion = new RepositoryVersionEntity();
+    repoVersion.setId(1l);
+    repoVersion.setOperatingSystems(OS_JSON);
+    repoVersion.setVersionXml(IOUtils.toString(new FileInputStream(f)));
+    repoVersion.setVersionXsd("version_definition.xsd");
+    repoVersion.setType(RepositoryType.PATCH);
+
+    ambariMetaInfo.getComponent("HDP", "2.1.1", "HBASE", "HBASE_MASTER").setVersionAdvertised(true);
+
+
+    Map<String, Host> hostsForCluster = new HashMap<String, Host>();
+    int hostCount = 10;
+    for (int i = 0; i < hostCount; i++) {
+      String hostname = "host" + i;
+      Host host = createNiceMock(hostname, Host.class);
+      expect(host.getHostName()).andReturn(hostname).anyTimes();
+      expect(host.getOsFamily()).andReturn("redhat6").anyTimes();
+      expect(host.getMaintenanceState(EasyMock.anyLong())).andReturn(
+          MaintenanceState.OFF).anyTimes();
+      expect(host.getAllHostVersions()).andReturn(
+          Collections.<HostVersionEntity>emptyList()).anyTimes();
+
+      replay(host);
+      hostsForCluster.put(hostname, host);
+    }
+
+    Service hdfsService = createNiceMock(Service.class);
+    Service hbaseService = createNiceMock(Service.class);
+    expect(hdfsService.getName()).andReturn("HDFS").anyTimes();
+    expect(hbaseService.getName()).andReturn("HBASE").anyTimes();
+//    Service metricsService = createNiceMock(Service.class);
+
+    ServiceComponent scNameNode = createNiceMock(ServiceComponent.class);
+    ServiceComponent scDataNode = createNiceMock(ServiceComponent.class);
+    ServiceComponent scHBaseMaster = createNiceMock(ServiceComponent.class);
+    ServiceComponent scMetricCollector = createNiceMock(ServiceComponent.class);
+
+    expect(hdfsService.getServiceComponents()).andReturn(new HashMap<String, ServiceComponent>());
+    expect(hbaseService.getServiceComponents()).andReturn(new HashMap<String, ServiceComponent>());
+//    expect(metricsService.getServiceComponents()).andReturn(new HashMap<String, ServiceComponent>());
+
+
+    Map<String, Service> serviceMap = new HashMap<>();
+    serviceMap.put("HDFS", hdfsService);
+    serviceMap.put("HBASE", hbaseService);
+
+
+    final ServiceComponentHost schDatanode = createMock(ServiceComponentHost.class);
+    expect(schDatanode.getServiceName()).andReturn("HDFS").anyTimes();
+    expect(schDatanode.getServiceComponentName()).andReturn("DATANODE").anyTimes();
+
+    final ServiceComponentHost schNamenode = createMock(ServiceComponentHost.class);
+    expect(schNamenode.getServiceName()).andReturn("HDFS").anyTimes();
+    expect(schNamenode.getServiceComponentName()).andReturn("NAMENODE").anyTimes();
+
+    final ServiceComponentHost schAMS = createMock(ServiceComponentHost.class);
+    expect(schAMS.getServiceName()).andReturn("AMBARI_METRICS").anyTimes();
+    expect(schAMS.getServiceComponentName()).andReturn("METRICS_COLLECTOR").anyTimes();
+
+    final ServiceComponentHost schHBM = createMock(ServiceComponentHost.class);
+    expect(schHBM.getServiceName()).andReturn("HBASE").anyTimes();
+    expect(schHBM.getServiceComponentName()).andReturn("HBASE_MASTER").anyTimes();
+
+    // First host contains versionable components
+    final List<ServiceComponentHost> schsH1 = Arrays.asList(schDatanode, schNamenode, schAMS);
+
+    // Second host does not contain versionable components
+    final List<ServiceComponentHost> schsH2 = Arrays.asList(schAMS);
+
+    // Third host only has hbase
+    final List<ServiceComponentHost> schsH3 = Arrays.asList(schHBM);
+
+    ServiceOsSpecific.Package hdfsPackage = new ServiceOsSpecific.Package();
+    hdfsPackage.setName("hdfs");
+
+//    ServiceOsSpecific.Package hbasePackage = new ServiceOsSpecific.Package();
+//    hbasePackage.setName("hbase");
+
+    List<ServiceOsSpecific.Package> packages = Collections.singletonList(hdfsPackage);
+
+    ActionManager actionManager = createNiceMock(ActionManager.class);
+
+    RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
+    ResourceProviderFactory resourceProviderFactory = createNiceMock(ResourceProviderFactory.class);
+    ResourceProvider csvResourceProvider = createNiceMock(ClusterStackVersionResourceProvider.class);
+
+    AbstractControllerResourceProvider.init(resourceProviderFactory);
+
+    Map<String, Map<String, String>> hostConfigTags = new HashMap<String, Map<String, String>>();
+    expect(configHelper.getEffectiveDesiredTags(anyObject(ClusterImpl.class), anyObject(String.class))).andReturn(hostConfigTags);
+
+    expect(managementController.getClusters()).andReturn(clusters).anyTimes();
+    expect(managementController.getAmbariMetaInfo()).andReturn(ambariMetaInfo).anyTimes();
+    expect(managementController.getAuthName()).andReturn("admin").anyTimes();
+    expect(managementController.getActionManager()).andReturn(actionManager).anyTimes();
+    expect(managementController.getJdkResourceUrl()).andReturn("/JdkResourceUrl").anyTimes();
+    expect(managementController.getPackagesForServiceHost(anyObject(ServiceInfo.class),
+            (Map<String, String>) anyObject(List.class), anyObject(String.class))).
+            andReturn(packages).times(1); // only one host has the versionable component
+
+    expect(resourceProviderFactory.getHostResourceProvider(anyObject(Set.class), anyObject(Map.class),
+            eq(managementController))).andReturn(csvResourceProvider).anyTimes();
+
+    expect(clusters.getCluster(anyObject(String.class))).andReturn(cluster);
+    expect(clusters.getHostsForCluster(anyObject(String.class))).andReturn(
+        hostsForCluster).anyTimes();
+
+    String clusterName = "Cluster100";
+    expect(cluster.getClusterId()).andReturn(1L).anyTimes();
+    expect(cluster.getHosts()).andReturn(hostsForCluster.values()).atLeastOnce();
+    expect(cluster.getServices()).andReturn(serviceMap).anyTimes();
+    expect(cluster.getCurrentStackVersion()).andReturn(stackId);
+    expect(cluster.getServiceComponentHosts(anyObject(String.class))).andAnswer(new IAnswer<List<ServiceComponentHost>>() {
+      @Override
+      public List<ServiceComponentHost> answer() throws Throwable {
+        String hostname = (String) EasyMock.getCurrentArguments()[0];
+        if (hostname.equals("host2")) {
+          return schsH2;
+        } else if (hostname.equals("host3")) {
+          return schsH3;
+        } else {
+          return schsH1;
+        }
+      }
+    }).anyTimes();
+
+    ExecutionCommand executionCommand = createNiceMock(ExecutionCommand.class);
+    ExecutionCommandWrapper executionCommandWrapper = createNiceMock(ExecutionCommandWrapper.class);
+
+    expect(executionCommandWrapper.getExecutionCommand()).andReturn(executionCommand).anyTimes();
+
+    Stage stage = createNiceMock(Stage.class);
+    expect(stage.getExecutionCommandWrapper(anyObject(String.class), anyObject(String.class))).
+            andReturn(executionCommandWrapper).anyTimes();
+
+    Map<Role, Float> successFactors = new HashMap<>();
+    expect(stage.getSuccessFactors()).andReturn(successFactors).atLeastOnce();
+
+    // Check that we create proper stage count
+    expect(stageFactory.createNew(anyLong(), anyObject(String.class),
+            anyObject(String.class), anyLong(),
+            anyObject(String.class), anyObject(String.class), anyObject(String.class),
+            anyObject(String.class))).andReturn(stage).
+            times((int) Math.ceil(hostCount / MAX_TASKS_PER_STAGE));
+
+    expect(
+            repositoryVersionDAOMock.findByStackAndVersion(
+                    anyObject(StackId.class),
+                    anyObject(String.class))).andReturn(repoVersion);
+
+    Capture<org.apache.ambari.server.actionmanager.Request> c = Capture.newInstance();
+    Capture<ExecuteActionRequest> ear = Capture.newInstance();
+
+    actionManager.sendActions(capture(c), capture(ear));
+    expectLastCall().atLeastOnce();
+    expect(actionManager.getRequestTasks(anyLong())).andReturn(Collections.<HostRoleCommand>emptyList()).anyTimes();
+
+    ClusterEntity clusterEntity = new ClusterEntity();
+    clusterEntity.setClusterId(1l);
+    clusterEntity.setClusterName(clusterName);
+    ClusterVersionEntity cve = new ClusterVersionEntity(clusterEntity,
+            repoVersion, RepositoryVersionState.INSTALL_FAILED, 0, "");
+    expect(clusterVersionDAO.findByClusterAndStackAndVersion(anyObject(String.class),
+            anyObject(StackId.class), anyObject(String.class))).andReturn(cve);
+
+    TopologyManager topologyManager = injector.getInstance(TopologyManager.class);
+    StageUtils.setTopologyManager(topologyManager);
+
+    // replay
+    replay(managementController, response, clusters, hdfsService, hbaseService, resourceProviderFactory, csvResourceProvider,
+            cluster, repositoryVersionDAOMock, configHelper, schDatanode, schNamenode, schAMS, schHBM, actionManager,
+            executionCommand, executionCommandWrapper,stage, stageFactory, clusterVersionDAO);
+
+    ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(
+        type,
+        PropertyHelper.getPropertyIds(type),
+        PropertyHelper.getKeyPropertyIds(type),
+        managementController);
+
+    injector.injectMembers(provider);
+
+    // add the property map to a set for the request.  add more maps for multiple creates
+    Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
+
+    Map<String, Object> properties = new LinkedHashMap<String, Object>();
+
+    // add properties to the request map
+    properties.put(ClusterStackVersionResourceProvider.CLUSTER_STACK_VERSION_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
+    properties.put(ClusterStackVersionResourceProvider.CLUSTER_STACK_VERSION_REPOSITORY_VERSION_PROPERTY_ID, "2.2.0.1-885");
+    properties.put(ClusterStackVersionResourceProvider.CLUSTER_STACK_VERSION_STACK_PROPERTY_ID, "HDP");
+    properties.put(ClusterStackVersionResourceProvider.CLUSTER_STACK_VERSION_VERSION_PROPERTY_ID, "2.1.1");
+
+    propertySet.add(properties);
+
+    // create the request
+    Request request = PropertyHelper.getCreateRequest(propertySet, null);
+
+    RequestStatus status = provider.createResources(request);
+    Assert.assertNotNull(status);
+
+    // verify
+    verify(managementController, response, clusters, stageFactory, stage);
+
+    // check that the success factor was populated in the stage
+    Float successFactor = successFactors.get(Role.INSTALL_PACKAGES);
+    Assert.assertEquals(Float.valueOf(0.85f), successFactor);
+
   }
 
 
@@ -395,7 +623,7 @@ public class ClusterStackVersionResourceProviderTest {
     expect(clusters.getCluster(anyObject(String.class))).andReturn(cluster);
 
     RepositoryVersionEntity repoVersion = new RepositoryVersionEntity();
-    repoVersion.setOperatingSystems(operatingSystemsJson);
+    repoVersion.setOperatingSystems(OS_JSON);
     StackEntity newDesiredStack = stackDAO.find("HDP", "2.0.1");
     repoVersion.setStack(newDesiredStack);
 
@@ -541,7 +769,7 @@ public class ClusterStackVersionResourceProviderTest {
     expect(clusters.getCluster(anyObject(String.class))).andReturn(cluster);
 
     RepositoryVersionEntity repoVersion = new RepositoryVersionEntity();
-    repoVersion.setOperatingSystems(operatingSystemsJson);
+    repoVersion.setOperatingSystems(OS_JSON);
     StackEntity newDesiredStack = stackDAO.find("HDP", "2.0.1");
     repoVersion.setStack(newDesiredStack);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/fc402ebc/ambari-server/src/test/resources/hbase_version_test.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/hbase_version_test.xml b/ambari-server/src/test/resources/hbase_version_test.xml
new file mode 100644
index 0000000..9df07ed
--- /dev/null
+++ b/ambari-server/src/test/resources/hbase_version_test.xml
@@ -0,0 +1,59 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<repository-version xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:noNamespaceSchemaLocation="version_definition.xsd">
+  
+  <release>
+    <type>PATCH</type>
+    <stack-id>HDP-2.3</stack-id>
+    <version>2.3.4.0</version>
+    <build>3396</build>
+    <compatible-with>2.3.2.[0-9]</compatible-with>
+    <release-notes>http://docs.hortonworks.com/HDPDocuments/HDP2/HDP-2.3.4/</release-notes>
+  </release>
+  
+  <manifest>
+    <service id="HBASE-112" name="HBASE" version="1.1.2" version-id="2_3_4_0-3396" />
+  </manifest>
+  
+  <available-services>
+    <service idref="HBASE-112" />
+  </available-services>
+  
+  <repository-info>
+    <os family="redhat6">
+      <repo>
+        <baseurl>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.3.4.0</baseurl>
+        <repoid>HDP-2.3</repoid>
+        <reponame>HDP</reponame>
+      </repo>
+      <repo>
+        <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6</baseurl>
+        <repoid>HDP-UTILS-1.1.0.20</repoid>
+        <reponame>HDP-UTILS</reponame>
+      </repo>
+    </os>
+  </repository-info>
+  
+  <upgrade>
+    <configuration type="hdfs-site">
+      <set key="foo" value="bar" />
+    </configuration>
+  </upgrade>
+</repository-version>


[29/50] [abbrv] ambari git commit: AMBARI-15058 - Schema changes for component history (jonathanhurley)

Posted by nc...@apache.org.
AMBARI-15058 - Schema changes for component history (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e4d1475e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e4d1475e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e4d1475e

Branch: refs/heads/trunk
Commit: e4d1475eab7be09baeed6624447b122f2bf7853c
Parents: 718f2ea
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Tue Feb 16 13:40:47 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Tue Feb 16 15:54:39 2016 -0500

----------------------------------------------------------------------
 .../dao/ServiceComponentDesiredStateDAO.java    |  53 +++++
 .../ServiceComponentDesiredStateEntity.java     |  40 ++++
 .../entities/ServiceComponentHistoryEntity.java | 219 +++++++++++++++++++
 .../main/resources/Ambari-DDL-Derby-CREATE.sql  |  34 ++-
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |  26 ++-
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |  26 ++-
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |  31 ++-
 .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql     |  28 ++-
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |  29 ++-
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   |  29 ++-
 .../src/main/resources/META-INF/persistence.xml |   1 +
 .../server/state/ServiceComponentTest.java      | 140 ++++++++++++
 12 files changed, 599 insertions(+), 57 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e4d1475e/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceComponentDesiredStateDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceComponentDesiredStateDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceComponentDesiredStateDAO.java
index b8c2fcc..4c906cc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceComponentDesiredStateDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceComponentDesiredStateDAO.java
@@ -20,12 +20,14 @@ package org.apache.ambari.server.orm.dao;
 
 import java.util.List;
 
+import javax.persistence.CascadeType;
 import javax.persistence.EntityManager;
 import javax.persistence.NoResultException;
 import javax.persistence.TypedQuery;
 
 import org.apache.ambari.server.orm.RequiresSession;
 import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
+import org.apache.ambari.server.orm.entities.ServiceComponentHistoryEntity;
 
 import com.google.inject.Inject;
 import com.google.inject.Provider;
@@ -125,4 +127,55 @@ public class ServiceComponentDesiredStateDAO {
       entityManagerProvider.get().remove(entity);
     }
   }
+
+  /**
+   * Creates a service component upgrade/downgrade historical event.
+   *
+   * @param serviceComponentHistoryEntity
+   */
+  @Transactional
+  public void create(ServiceComponentHistoryEntity serviceComponentHistoryEntity) {
+    entityManagerProvider.get().persist(serviceComponentHistoryEntity);
+  }
+
+  /**
+   * Merges a service component upgrade/downgrade historical event, creating it
+   * in the process if it does not already exist. The associated
+   * {@link ServiceComponentDesiredStateEntity} is automatically merged via its
+   * {@link CascadeType}.
+   *
+   * @param serviceComponentHistoryEntity
+   * @return
+   */
+  @Transactional
+  public ServiceComponentHistoryEntity merge(
+      ServiceComponentHistoryEntity serviceComponentHistoryEntity) {
+    return entityManagerProvider.get().merge(serviceComponentHistoryEntity);
+  }
+
+  /**
+   * Gets the history for a component.
+   *
+   * @param clusterId
+   *          the component's cluster.
+   * @param serviceName
+   *          the component's service (not {@code null}).
+   * @param componentName
+   *          the component's name (not {@code null}).
+   * @return
+   */
+  @RequiresSession
+  public List<ServiceComponentHistoryEntity> findHistory(long clusterId, String serviceName,
+      String componentName) {
+    EntityManager entityManager = entityManagerProvider.get();
+    TypedQuery<ServiceComponentHistoryEntity> query = entityManager.createNamedQuery(
+        "ServiceComponentHistoryEntity.findByComponent", ServiceComponentHistoryEntity.class);
+
+    query.setParameter("clusterId", clusterId);
+    query.setParameter("serviceName", serviceName);
+    query.setParameter("componentName", componentName);
+
+    ServiceComponentDesiredStateEntity entity = null;
+    return daoUtils.selectList(query);
+  }
 }
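
The new findHistory() above is a standard JPA named-query lookup; daoUtils.selectList(query) is Ambari plumbing, and in plain JPA it amounts to getResultList(). A hedged sketch of the equivalent call, assuming an EntityManager from a configured persistence unit and the named query defined on the entity as in the hunk above.

    import java.util.List;
    import javax.persistence.EntityManager;
    import javax.persistence.TypedQuery;

    public class NamedQueryLookupSketch {

      // Generic helper for illustration only; the real DAO returns ServiceComponentHistoryEntity.
      public static <T> List<T> findHistory(EntityManager em, Class<T> type,
          long clusterId, String serviceName, String componentName) {
        TypedQuery<T> query =
            em.createNamedQuery("ServiceComponentHistoryEntity.findByComponent", type);
        query.setParameter("clusterId", clusterId);
        query.setParameter("serviceName", serviceName);
        query.setParameter("componentName", componentName);
        return query.getResultList();
      }
    }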

http://git-wip-us.apache.org/repos/asf/ambari/blob/e4d1475e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
index d2d1b42..65cc107 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
@@ -18,8 +18,10 @@
 
 package org.apache.ambari.server.orm.entities;
 
+import java.util.ArrayList;
 import java.util.Collection;
 
+import javax.persistence.CascadeType;
 import javax.persistence.Column;
 import javax.persistence.Entity;
 import javax.persistence.EnumType;
@@ -96,6 +98,15 @@ public class ServiceComponentDesiredStateEntity {
   @OneToMany(mappedBy = "serviceComponentDesiredStateEntity")
   private Collection<HostComponentDesiredStateEntity> hostComponentDesiredStateEntities;
 
+  /**
+   * All of the upgrades and downgrades which have occurred for this component.
+   * Can be {@code null} for none.
+   */
+  @OneToMany(
+      mappedBy = "m_serviceComponentDesiredStateEntity",
+      cascade = { CascadeType.MERGE, CascadeType.REFRESH, CascadeType.REMOVE })
+  private Collection<ServiceComponentHistoryEntity> serviceComponentHistory;
+
   public Long getId() {
     return id;
   }
@@ -140,6 +151,35 @@ public class ServiceComponentDesiredStateEntity {
     this.desiredStack = desiredStack;
   }
 
+  /**
+   * Adds a historical entry for the version of this service component. New
+   * entries are automatically created when this entity is merged via a
+   * {@link CascadeType#MERGE}.
+   *
+   * @param historicalEntry
+   *          the entry to add.
+   */
+  public void addHistory(ServiceComponentHistoryEntity historicalEntry) {
+    if (null == serviceComponentHistory) {
+      serviceComponentHistory = new ArrayList<>();
+    }
+
+    serviceComponentHistory.add(historicalEntry);
+
+    if (!equals(historicalEntry.getServiceComponentDesiredState())) {
+      historicalEntry.setServiceComponentDesiredState(this);
+    }
+  }
+
+  /**
+   * Gets the history of this component's upgrades and downgrades.
+   *
+   * @return the component history, or {@code null} if none.
+   */
+  public Collection<ServiceComponentHistoryEntity> getHistory() {
+    return serviceComponentHistory;
+  }
+
   @Override
   public boolean equals(Object o) {
     if (this == o) {

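For context on the bidirectional bookkeeping above: addHistory() lazily creates the
collection and wires the back-reference on the entry. The sketch below assumes plain,
detached entities constructed directly; the class name is illustrative only:

  import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
  import org.apache.ambari.server.orm.entities.ServiceComponentHistoryEntity;

  public class HistoryLinkSketch {
    public static void main(String[] args) {
      ServiceComponentDesiredStateEntity component = new ServiceComponentDesiredStateEntity();
      ServiceComponentHistoryEntity entry = new ServiceComponentHistoryEntity();

      // addHistory() creates the collection on first use and sets the entry's back-reference.
      component.addHistory(entry);

      System.out.println(component.getHistory().contains(entry));               // true
      System.out.println(entry.getServiceComponentDesiredState() == component); // true
    }
  }
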
http://git-wip-us.apache.org/repos/asf/ambari/blob/e4d1475e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentHistoryEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentHistoryEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentHistoryEntity.java
new file mode 100644
index 0000000..e7fef71
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentHistoryEntity.java
@@ -0,0 +1,219 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.orm.entities;
+
+import javax.persistence.CascadeType;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.NamedQueries;
+import javax.persistence.NamedQuery;
+import javax.persistence.Table;
+import javax.persistence.TableGenerator;
+
+import org.apache.commons.lang.ObjectUtils;
+
+/**
+ * The {@link ServiceComponentHistoryEntity} class is used to represent an
+ * upgrade or downgrade which was performed on an individual service component.
+ */
+@Entity
+@Table(name = "servicecomponent_history")
+@TableGenerator(
+    name = "servicecomponent_history_id_generator",
+    table = "ambari_sequences",
+    pkColumnName = "sequence_name",
+    valueColumnName = "sequence_value",
+    pkColumnValue = "servicecomponent_history_id_seq",
+    initialValue = 0)
+@NamedQueries({ @NamedQuery(
+    name = "ServiceComponentHistoryEntity.findByComponent",
+    query = "SELECT history FROM ServiceComponentHistoryEntity history WHERE history.m_serviceComponentDesiredStateEntity.clusterId = :clusterId AND history.m_serviceComponentDesiredStateEntity.serviceName = :serviceName AND history.m_serviceComponentDesiredStateEntity.componentName = :componentName") })
+public class ServiceComponentHistoryEntity {
+
+  @Id
+  @GeneratedValue(
+      strategy = GenerationType.TABLE,
+      generator = "servicecomponent_history_id_generator")
+  @Column(name = "id", nullable = false, updatable = false)
+  private long m_id;
+
+  @ManyToOne(optional = false, cascade = { CascadeType.MERGE })
+  @JoinColumn(name = "component_id", referencedColumnName = "id", nullable = false)
+  private ServiceComponentDesiredStateEntity m_serviceComponentDesiredStateEntity;
+
+  @ManyToOne(optional = false)
+  @JoinColumn(name = "from_stack_id", referencedColumnName = "stack_id", nullable = false)
+  private StackEntity m_fromStack;
+
+  @ManyToOne(optional = false)
+  @JoinColumn(name = "to_stack_id", referencedColumnName = "stack_id", nullable = false)
+  private StackEntity m_toStack;
+
+  @ManyToOne(optional = false)
+  @JoinColumn(name = "upgrade_id", referencedColumnName = "upgrade_id", nullable = false)
+  private UpgradeEntity m_upgradeEntity;
+
+  public ServiceComponentDesiredStateEntity getServiceComponentDesiredState() {
+    return m_serviceComponentDesiredStateEntity;
+  }
+
+  /**
+   * Sets the component associated with this historical entry.
+   *
+   * @param serviceComponentDesiredStateEntity
+   *          the component to associate with this historical entry (not
+   *          {@code null}).
+   */
+  public void setServiceComponentDesiredState(ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntity) {
+    m_serviceComponentDesiredStateEntity = serviceComponentDesiredStateEntity;
+
+    if (null == m_serviceComponentDesiredStateEntity.getHistory() || !m_serviceComponentDesiredStateEntity.getHistory().contains(this)) {
+      m_serviceComponentDesiredStateEntity.addHistory(this);
+    }
+  }
+
+  /**
+   * @return the id
+   */
+  public long getId() {
+    return m_id;
+  }
+
+  /**
+   * @return the fromStack
+   */
+  public StackEntity getFromStack() {
+    return m_fromStack;
+  }
+
+  /**
+   * @param fromStack
+   *          the fromStack to set
+   */
+  public void setFromStack(StackEntity fromStack) {
+    m_fromStack = fromStack;
+  }
+
+  /**
+   * @return the toStack
+   */
+  public StackEntity getToStack() {
+    return m_toStack;
+  }
+
+  /**
+   * @param toStack
+   *          the toStack to set
+   */
+  public void setToStack(StackEntity toStack) {
+    m_toStack = toStack;
+  }
+
+  /**
+   * @return the upgradeEntity
+   */
+  public UpgradeEntity getUpgrade() {
+    return m_upgradeEntity;
+  }
+
+  /**
+   * @param upgradeEntity
+   *          the upgradeEntity to set
+   */
+  public void setUpgrade(UpgradeEntity upgradeEntity) {
+    m_upgradeEntity = upgradeEntity;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + ObjectUtils.hashCode(m_fromStack);
+    result = prime * result + (int) (m_id ^ (m_id >>> 32));
+    result = prime * result + ObjectUtils.hashCode(m_serviceComponentDesiredStateEntity);
+    result = prime * result + ObjectUtils.hashCode(m_toStack);
+    result = prime * result + ObjectUtils.hashCode(m_upgradeEntity);
+    return result;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+
+    if (obj == null) {
+      return false;
+    }
+
+    if (getClass() != obj.getClass()) {
+      return false;
+    }
+
+    ServiceComponentHistoryEntity other = (ServiceComponentHistoryEntity) obj;
+    if (m_fromStack == null) {
+      if (other.m_fromStack != null) {
+        return false;
+      }
+    } else if (!m_fromStack.equals(other.m_fromStack)) {
+      return false;
+    }
+
+    if (m_id != other.m_id) {
+      return false;
+    }
+
+    if (m_serviceComponentDesiredStateEntity == null) {
+      if (other.m_serviceComponentDesiredStateEntity != null) {
+        return false;
+      }
+    } else if (!m_serviceComponentDesiredStateEntity.equals(
+        other.m_serviceComponentDesiredStateEntity)) {
+      return false;
+    }
+
+    if (m_toStack == null) {
+      if (other.m_toStack != null) {
+        return false;
+      }
+    } else if (!m_toStack.equals(other.m_toStack)) {
+      return false;
+    }
+
+    if (m_upgradeEntity == null) {
+      if (other.m_upgradeEntity != null) {
+        return false;
+      }
+    } else if (!m_upgradeEntity.equals(other.m_upgradeEntity)) {
+      return false;
+    }
+
+    return true;
+  }
+}

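The findByComponent named query declared above navigates through the owning component to
filter on cluster, service, and component name. As a point of reference, it could also be
invoked straight from an EntityManager; the sketch below assumes a persistence context is
already available and mirrors the parameter names from the query definition (the class and
method names are illustrative only):

  import java.util.List;

  import javax.persistence.EntityManager;
  import javax.persistence.TypedQuery;

  import org.apache.ambari.server.orm.entities.ServiceComponentHistoryEntity;

  public class FindByComponentSketch {
    public static List<ServiceComponentHistoryEntity> find(EntityManager em, long clusterId,
        String serviceName, String componentName) {
      TypedQuery<ServiceComponentHistoryEntity> query = em.createNamedQuery(
          "ServiceComponentHistoryEntity.findByComponent", ServiceComponentHistoryEntity.class);

      // Parameter names match the JPQL declared on the entity.
      query.setParameter("clusterId", clusterId);
      query.setParameter("serviceName", serviceName);
      query.setParameter("componentName", componentName);

      return query.getResultList();
    }
  }
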
http://git-wip-us.apache.org/repos/asf/ambari/blob/e4d1475e/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
index 2865cf9..2db745b 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
@@ -177,12 +177,10 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
-  CONSTRAINT pk_servicecomponentdesiredstate PRIMARY KEY (id),
-  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name,service_name,cluster_id)
+  CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
+  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
 );
 
-CREATE INDEX idx_sc_desired_state ON servicecomponentdesiredstate(component_name, service_name, cluster_id);
-
 CREATE TABLE servicedesiredstate (
   cluster_id BIGINT NOT NULL,
   desired_host_role_mapping INTEGER NOT NULL,
@@ -263,7 +261,7 @@ CREATE TABLE stage (
   request_id BIGINT NOT NULL,
   cluster_id BIGINT NOT NULL,
   skippable SMALLINT DEFAULT 0 NOT NULL,
-  supports_auto_skip_failure SMALLINT DEFAULT 0 NOT NULL,  
+  supports_auto_skip_failure SMALLINT DEFAULT 0 NOT NULL,
   log_info VARCHAR(255) NOT NULL,
   request_context VARCHAR(255),
   cluster_host_info BLOB NOT NULL,
@@ -703,8 +701,8 @@ ALTER TABLE clusterstate ADD CONSTRAINT FK_clusterstate_cluster_id FOREIGN KEY (
 ALTER TABLE cluster_version ADD CONSTRAINT FK_cluster_version_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE cluster_version ADD CONSTRAINT FK_cluster_version_repovers_id FOREIGN KEY (repo_version_id) REFERENCES repo_version (repo_version_id);
 ALTER TABLE hostcomponentdesiredstate ADD CONSTRAINT FK_hcdesiredstate_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
-ALTER TABLE hostcomponentdesiredstate ADD CONSTRAINT hstcmpnntdesiredstatecmpnntnme FOREIGN KEY (component_name, cluster_id, service_name) REFERENCES servicecomponentdesiredstate (component_name, cluster_id, service_name);
-ALTER TABLE hostcomponentstate ADD CONSTRAINT hstcomponentstatecomponentname FOREIGN KEY (component_name, cluster_id, service_name) REFERENCES servicecomponentdesiredstate (component_name, cluster_id, service_name);
+ALTER TABLE hostcomponentdesiredstate ADD CONSTRAINT hstcmpnntdesiredstatecmpnntnme FOREIGN KEY (component_name, service_name, cluster_id) REFERENCES servicecomponentdesiredstate (component_name, service_name, cluster_id);
+ALTER TABLE hostcomponentstate ADD CONSTRAINT hstcomponentstatecomponentname FOREIGN KEY (component_name, service_name, cluster_id) REFERENCES servicecomponentdesiredstate (component_name, service_name, cluster_id);
 ALTER TABLE hostcomponentstate ADD CONSTRAINT FK_hostcomponentstate_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
 ALTER TABLE hoststate ADD CONSTRAINT FK_hoststate_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
 ALTER TABLE host_version ADD CONSTRAINT FK_host_version_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
@@ -954,13 +952,26 @@ CREATE TABLE upgrade_item (
   FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
 );
 
+CREATE TABLE servicecomponent_history(
+  id BIGINT NOT NULL,
+  component_id BIGINT NOT NULL,
+  upgrade_id BIGINT NOT NULL,
+  from_stack_id BIGINT NOT NULL,
+  to_stack_id BIGINT NOT NULL,
+  CONSTRAINT PK_sc_history PRIMARY KEY (id),
+  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id),
+  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id),
+  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id),
+  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)
+);
+
 ---------inserting some data-----------
 -- In order for the first ID to be 1, must initialize the ambari_sequences table with a sequence_value of 0.
 -- BEGIN;
 INSERT INTO ambari_sequences (sequence_name, sequence_value)
   SELECT 'cluster_id_seq', 1 FROM SYSIBM.SYSDUMMY1
   UNION ALL
-  SELECT 'host_id_seq', 0 FROM SYSIBM.SYSDUMMY1	
+  SELECT 'host_id_seq', 0 FROM SYSIBM.SYSDUMMY1
   UNION ALL
   SELECT 'user_id_seq', 2 FROM SYSIBM.SYSDUMMY1
   UNION ALL
@@ -1044,9 +1055,12 @@ INSERT INTO ambari_sequences (sequence_name, sequence_value)
   union all
   select 'setting_id_seq', 0 FROM SYSIBM.SYSDUMMY1
   union all
-  select 'hostcomponentstate_id_seq', 0  FROM SYSIBM.SYSDUMMY1
+  select 'hostcomponentstate_id_seq', 0 FROM SYSIBM.SYSDUMMY1
+  union all
+  select 'servicecomponentdesiredstate_id_seq', 0 FROM SYSIBM.SYSDUMMY1
   union all
-  select 'servicecomponentdesiredstate_id_seq', 0  FROM SYSIBM.SYSDUMMY1;
+  select 'servicecomponent_history_id_seq', 0 FROM SYSIBM.SYSDUMMY1;
+
 
 INSERT INTO adminresourcetype (resource_type_id, resource_type_name)
   SELECT 1, 'AMBARI' FROM SYSIBM.SYSDUMMY1

http://git-wip-us.apache.org/repos/asf/ambari/blob/e4d1475e/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 93ec226..8c626f5 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -1,5 +1,5 @@
 --
--- Licensed to the stackpache Software Foundation (ASF) under one
+-- Licensed to the Apache Software Foundation (ASF) under one
 -- or more contributor license agreements.  See the NOTICE file
 -- distributed with this work for additional information
 -- regarding copyright ownership.  The ASF licenses this file
@@ -178,12 +178,10 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(100) NOT NULL,
-  CONSTRAINT pk_servicecomponentdesiredstate PRIMARY KEY (id),
-  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name,service_name,cluster_id)
+  CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
+  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
 );
 
-CREATE INDEX idx_sc_desired_state ON servicecomponentdesiredstate(component_name, service_name, cluster_id);
-
 CREATE TABLE servicedesiredstate (
   cluster_id BIGINT NOT NULL,
   desired_host_role_mapping INTEGER NOT NULL,
@@ -713,8 +711,8 @@ ALTER TABLE clusterstate ADD CONSTRAINT FK_clusterstate_cluster_id FOREIGN KEY (
 ALTER TABLE cluster_version ADD CONSTRAINT FK_cluster_version_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE cluster_version ADD CONSTRAINT FK_cluster_version_repovers_id FOREIGN KEY (repo_version_id) REFERENCES repo_version (repo_version_id);
 ALTER TABLE hostcomponentdesiredstate ADD CONSTRAINT FK_hcdesiredstate_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
-ALTER TABLE hostcomponentdesiredstate ADD CONSTRAINT hstcmpnntdesiredstatecmpnntnme FOREIGN KEY (component_name, cluster_id, service_name) REFERENCES servicecomponentdesiredstate (component_name, cluster_id, service_name);
-ALTER TABLE hostcomponentstate ADD CONSTRAINT hstcomponentstatecomponentname FOREIGN KEY (component_name, cluster_id, service_name) REFERENCES servicecomponentdesiredstate (component_name, cluster_id, service_name);
+ALTER TABLE hostcomponentdesiredstate ADD CONSTRAINT hstcmpnntdesiredstatecmpnntnme FOREIGN KEY (component_name, service_name, cluster_id) REFERENCES servicecomponentdesiredstate (component_name, service_name, cluster_id);
+ALTER TABLE hostcomponentstate ADD CONSTRAINT hstcomponentstatecomponentname FOREIGN KEY (component_name, service_name, cluster_id) REFERENCES servicecomponentdesiredstate (component_name, service_name, cluster_id);
 ALTER TABLE hostcomponentstate ADD CONSTRAINT FK_hostcomponentstate_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
 ALTER TABLE hoststate ADD CONSTRAINT FK_hoststate_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
 ALTER TABLE host_version ADD CONSTRAINT FK_host_version_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
@@ -964,6 +962,19 @@ CREATE TABLE upgrade_item (
   FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
 );
 
+CREATE TABLE servicecomponent_history(
+  id BIGINT NOT NULL,
+  component_id BIGINT NOT NULL,
+  upgrade_id BIGINT NOT NULL,
+  from_stack_id BIGINT NOT NULL,
+  to_stack_id BIGINT NOT NULL,
+  CONSTRAINT PK_sc_history PRIMARY KEY (id),
+  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id),
+  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id),
+  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id),
+  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)
+);
+
 -- In order for the first ID to be 1, must initialize the ambari_sequences table with a sequence_value of 0.
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('cluster_id_seq', 1);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('host_id_seq', 0);
@@ -1009,6 +1020,7 @@ INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('topology_re
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('topology_host_group_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('setting_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('hostcomponentstate_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('servicecomponent_history_id_seq', 0);
 
 insert into adminresourcetype (resource_type_id, resource_type_name)
   select 1, 'AMBARI'

http://git-wip-us.apache.org/repos/asf/ambari/blob/e4d1475e/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index b2d014b..f8e4ee1 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -168,12 +168,10 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id NUMBER(19) NOT NULL,
   desired_state VARCHAR2(255) NOT NULL,
   service_name VARCHAR2(255) NOT NULL,
-  CONSTRAINT pk_servicecomponentdesiredstate PRIMARY KEY (alert_id),
-  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name,service_name,cluster_id)
+  CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
+  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
 );
 
-CREATE INDEX idx_sc_desired_state ON servicecomponentdesiredstate(component_name, service_name, cluster_id);
-
 CREATE TABLE servicedesiredstate (
   cluster_id NUMBER(19) NOT NULL,
   desired_host_role_mapping NUMBER(10) NOT NULL,
@@ -252,7 +250,7 @@ CREATE TABLE stage (
   request_id NUMBER(19) NOT NULL,
   cluster_id NUMBER(19) NULL,
   skippable NUMBER(1) DEFAULT 0 NOT NULL,
-  supports_auto_skip_failure NUMBER(1) DEFAULT 0 NOT NULL,  
+  supports_auto_skip_failure NUMBER(1) DEFAULT 0 NOT NULL,
   log_info VARCHAR2(255) NULL,
   request_context VARCHAR2(255) NULL,
   cluster_host_info BLOB NOT NULL,
@@ -704,8 +702,8 @@ ALTER TABLE clusterstate ADD CONSTRAINT FK_clusterstate_cluster_id FOREIGN KEY (
 ALTER TABLE cluster_version ADD CONSTRAINT FK_cluster_version_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE cluster_version ADD CONSTRAINT FK_cluster_version_repovers_id FOREIGN KEY (repo_version_id) REFERENCES repo_version (repo_version_id);
 ALTER TABLE hostcomponentdesiredstate ADD CONSTRAINT FK_hcdesiredstate_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
-ALTER TABLE hostcomponentdesiredstate ADD CONSTRAINT hstcmpnntdesiredstatecmpnntnme FOREIGN KEY (component_name, cluster_id, service_name) REFERENCES servicecomponentdesiredstate (component_name, cluster_id, service_name);
-ALTER TABLE hostcomponentstate ADD CONSTRAINT hstcomponentstatecomponentname FOREIGN KEY (component_name, cluster_id, service_name) REFERENCES servicecomponentdesiredstate (component_name, cluster_id, service_name);
+ALTER TABLE hostcomponentdesiredstate ADD CONSTRAINT hstcmpnntdesiredstatecmpnntnme FOREIGN KEY (component_name, service_name, cluster_id) REFERENCES servicecomponentdesiredstate (component_name, service_name, cluster_id);
+ALTER TABLE hostcomponentstate ADD CONSTRAINT hstcomponentstatecomponentname FOREIGN KEY (component_name, service_name, cluster_id) REFERENCES servicecomponentdesiredstate (component_name, service_name, cluster_id);
 ALTER TABLE hostcomponentstate ADD CONSTRAINT FK_hostcomponentstate_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
 ALTER TABLE hoststate ADD CONSTRAINT FK_hoststate_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
 ALTER TABLE host_version ADD CONSTRAINT FK_host_version_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
@@ -953,6 +951,19 @@ CREATE TABLE upgrade_item (
   FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
 );
 
+CREATE TABLE servicecomponent_history(
+  id NUMBER(19) NOT NULL,
+  component_id NUMBER(19) NOT NULL,
+  upgrade_id NUMBER(19) NOT NULL,
+  from_stack_id NUMBER(19) NOT NULL,
+  to_stack_id NUMBER(19) NOT NULL,
+  CONSTRAINT PK_sc_history PRIMARY KEY (id),
+  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id),
+  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id),
+  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id),
+  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)
+);
+
 ---------inserting some data-----------
 -- In order for the first ID to be 1, must initialize the ambari_sequences table with a sequence_value of 0.
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('host_role_command_id_seq', 0);
@@ -1000,6 +1011,7 @@ INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('topology_ho
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('setting_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('hostcomponentstate_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('servicecomponentdesiredstate_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('servicecomponent_history_id_seq', 0);
 
 INSERT INTO metainfo("metainfo_key", "metainfo_value") values ('version', '${ambariVersion}');
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e4d1475e/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index 7c648b2..e3ce96c 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -177,13 +177,10 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
-  CONSTRAINT pk_servicecomponentdesiredstate PRIMARY KEY (id),
-  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name,service_name,cluster_id)
+  CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
+  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
 );
 
-CREATE INDEX idx_sc_desired_state ON servicecomponentdesiredstate(component_name, service_name, cluster_id);
-
-
 CREATE TABLE servicedesiredstate (
   cluster_id BIGINT NOT NULL,
   desired_host_role_mapping INTEGER NOT NULL,
@@ -264,7 +261,7 @@ CREATE TABLE stage (
   request_id BIGINT NOT NULL,
   cluster_id BIGINT NOT NULL,
   skippable SMALLINT DEFAULT 0 NOT NULL,
-  supports_auto_skip_failure SMALLINT DEFAULT 0 NOT NULL,  
+  supports_auto_skip_failure SMALLINT DEFAULT 0 NOT NULL,
   log_info VARCHAR(255) NOT NULL,
   request_context VARCHAR(255),
   cluster_host_info BYTEA NOT NULL,
@@ -705,8 +702,8 @@ ALTER TABLE clusterstate ADD CONSTRAINT FK_clusterstate_cluster_id FOREIGN KEY (
 ALTER TABLE cluster_version ADD CONSTRAINT FK_cluster_version_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE cluster_version ADD CONSTRAINT FK_cluster_version_repovers_id FOREIGN KEY (repo_version_id) REFERENCES repo_version (repo_version_id);
 ALTER TABLE hostcomponentdesiredstate ADD CONSTRAINT FK_hcdesiredstate_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
-ALTER TABLE hostcomponentdesiredstate ADD CONSTRAINT hstcmpnntdesiredstatecmpnntnme FOREIGN KEY (component_name, cluster_id, service_name) REFERENCES servicecomponentdesiredstate (component_name, cluster_id, service_name);
-ALTER TABLE hostcomponentstate ADD CONSTRAINT hstcomponentstatecomponentname FOREIGN KEY (component_name, cluster_id, service_name) REFERENCES servicecomponentdesiredstate (component_name, cluster_id, service_name);
+ALTER TABLE hostcomponentdesiredstate ADD CONSTRAINT hstcmpnntdesiredstatecmpnntnme FOREIGN KEY (component_name, service_name, cluster_id) REFERENCES servicecomponentdesiredstate (component_name, service_name, cluster_id);
+ALTER TABLE hostcomponentstate ADD CONSTRAINT hstcomponentstatecomponentname FOREIGN KEY (component_name, service_name, cluster_id) REFERENCES servicecomponentdesiredstate (component_name, service_name, cluster_id);
 ALTER TABLE hostcomponentstate ADD CONSTRAINT FK_hostcomponentstate_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
 ALTER TABLE hoststate ADD CONSTRAINT FK_hoststate_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
 ALTER TABLE host_version ADD CONSTRAINT FK_host_version_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
@@ -956,6 +953,19 @@ CREATE TABLE upgrade_item (
   FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
 );
 
+CREATE TABLE servicecomponent_history(
+  id BIGINT NOT NULL,
+  component_id BIGINT NOT NULL,
+  upgrade_id BIGINT NOT NULL,
+  from_stack_id BIGINT NOT NULL,
+  to_stack_id BIGINT NOT NULL,
+  CONSTRAINT PK_sc_history PRIMARY KEY (id),
+  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id),
+  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id),
+  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id),
+  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)
+);
+
 ---------inserting some data-----------
 -- In order for the first ID to be 1, must initialize the ambari_sequences table with a sequence_value of 0.
 BEGIN;
@@ -1048,7 +1058,10 @@ INSERT INTO ambari_sequences (sequence_name, sequence_value)
   union all
   select 'hostcomponentstate_id_seq', 0
   union all
-  select 'servicecomponentdesiredstate_id_seq', 0;
+  select 'servicecomponentdesiredstate_id_seq', 0
+  union all
+  select 'servicecomponent_history_id_seq', 0;
+
 
 INSERT INTO adminresourcetype (resource_type_id, resource_type_name)
   SELECT 1, 'AMBARI'

http://git-wip-us.apache.org/repos/asf/ambari/blob/e4d1475e/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
index 8e21f67..5d47c28 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
@@ -202,13 +202,11 @@ CREATE TABLE ambari.servicecomponentdesiredstate (
   desired_stack_id BIGINT NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
-  CONSTRAINT pk_servicecomponentdesiredstate PRIMARY KEY (id),
-  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name,service_name,cluster_id)
+  CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
+  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
 );
 GRANT ALL PRIVILEGES ON TABLE ambari.servicecomponentdesiredstate TO :username;
 
-CREATE INDEX idx_sc_desired_state ON ambari.servicecomponentdesiredstate(component_name, service_name, cluster_id);
-
 CREATE TABLE ambari.servicedesiredstate (
   cluster_id BIGINT NOT NULL,
   desired_host_role_mapping INTEGER NOT NULL,
@@ -786,8 +784,8 @@ ALTER TABLE ambari.clusterstate ADD CONSTRAINT FK_clusterstate_cluster_id FOREIG
 ALTER TABLE ambari.cluster_version ADD CONSTRAINT FK_cluster_version_cluster_id FOREIGN KEY (cluster_id) REFERENCES ambari.clusters (cluster_id);
 ALTER TABLE ambari.cluster_version ADD CONSTRAINT FK_cluster_version_repovers_id FOREIGN KEY (repo_version_id) REFERENCES ambari.repo_version (repo_version_id);
 ALTER TABLE ambari.hostcomponentdesiredstate ADD CONSTRAINT FK_hcdesiredstate_host_id FOREIGN KEY (host_id) REFERENCES ambari.hosts (host_id);
-ALTER TABLE ambari.hostcomponentdesiredstate ADD CONSTRAINT hstcmpnntdesiredstatecmpnntnme FOREIGN KEY (component_name, cluster_id, service_name) REFERENCES ambari.servicecomponentdesiredstate (component_name, cluster_id, service_name);
-ALTER TABLE ambari.hostcomponentstate ADD CONSTRAINT hstcomponentstatecomponentname FOREIGN KEY (component_name, cluster_id, service_name) REFERENCES ambari.servicecomponentdesiredstate (component_name, cluster_id, service_name);
+ALTER TABLE ambari.hostcomponentdesiredstate ADD CONSTRAINT hstcmpnntdesiredstatecmpnntnme FOREIGN KEY (component_name, service_name, cluster_id) REFERENCES ambari.servicecomponentdesiredstate (component_name, service_name, cluster_id);
+ALTER TABLE ambari.hostcomponentstate ADD CONSTRAINT hstcomponentstatecomponentname FOREIGN KEY (component_name, service_name, cluster_id) REFERENCES ambari.servicecomponentdesiredstate (component_name, service_name, cluster_id);
 ALTER TABLE ambari.hostcomponentstate ADD CONSTRAINT FK_hostcomponentstate_host_id FOREIGN KEY (host_id) REFERENCES ambari.hosts (host_id);
 ALTER TABLE ambari.hoststate ADD CONSTRAINT FK_hoststate_host_id FOREIGN KEY (host_id) REFERENCES ambari.hosts (host_id);
 ALTER TABLE ambari.host_version ADD CONSTRAINT FK_host_version_host_id FOREIGN KEY (host_id) REFERENCES ambari.hosts (host_id);
@@ -1054,6 +1052,20 @@ GRANT ALL PRIVILEGES ON TABLE ambari.upgrade TO :username;
 GRANT ALL PRIVILEGES ON TABLE ambari.upgrade_group TO :username;
 GRANT ALL PRIVILEGES ON TABLE ambari.upgrade_item TO :username;
 
+CREATE TABLE ambari.servicecomponent_history(
+  id BIGINT NOT NULL,
+  component_id BIGINT NOT NULL,
+  upgrade_id BIGINT NOT NULL,
+  from_stack_id BIGINT NOT NULL,
+  to_stack_id BIGINT NOT NULL,
+  CONSTRAINT PK_sc_history PRIMARY KEY (id),
+  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES ambari.servicecomponentdesiredstate (id),
+  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES ambari.upgrade (upgrade_id),
+  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES ambari.stack (stack_id),
+  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES ambari.stack (stack_id)
+);
+GRANT ALL PRIVILEGES ON TABLE ambari.servicecomponent_history TO :username;
+
 ---------inserting some data-----------
 -- In order for the first ID to be 1, must initialize the ambari_sequences table with a sequence_value of 0.
 BEGIN;
@@ -1146,7 +1158,9 @@ INSERT INTO ambari.ambari_sequences (sequence_name, sequence_value)
   union all
   select 'hostcomponentstate_id_seq', 0
   union all
-  select 'servicecomponentdesiredstate_id_seq', 0;
+  select 'servicecomponentdesiredstate_id_seq', 0
+  union all
+  select 'servicecomponent_history_id_seq', 0;
 
 INSERT INTO ambari.adminresourcetype (resource_type_id, resource_type_name)
   SELECT 1, 'AMBARI'

http://git-wip-us.apache.org/repos/asf/ambari/blob/e4d1475e/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
index f4ce830..263004f 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
@@ -1,5 +1,5 @@
 --
--- Licensed to the stackpache Software Foundation (ASF) under one
+-- Licensed to the Apache Software Foundation (ASF) under one
 -- or more contributor license agreements.  See the NOTICE file
 -- distributed with this work for additional information
 -- regarding copyright ownership.  The ASF licenses this file
@@ -167,12 +167,10 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_stack_id NUMERIC(19) NOT NULL,
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
-  CONSTRAINT pk_servicecomponentdesiredstate PRIMARY KEY (id),
-  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name,service_name,cluster_id)
+  CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
+  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
 );
 
-CREATE INDEX idx_sc_desired_state ON servicecomponentdesiredstate(component_name, service_name, cluster_id);
-
 CREATE TABLE servicedesiredstate (
   cluster_id NUMERIC(19) NOT NULL,
   desired_host_role_mapping INTEGER NOT NULL,
@@ -671,6 +669,7 @@ CREATE TABLE setting (
   update_timestamp NUMERIC(19) NOT NULL,
   PRIMARY KEY (id)
 );
+
 -- tasks indices --
 CREATE INDEX idx_stage_request_id ON stage (request_id);
 CREATE INDEX idx_hrc_request_id ON host_role_command (request_id);
@@ -702,8 +701,8 @@ ALTER TABLE clusterstate ADD CONSTRAINT FK_clusterstate_cluster_id FOREIGN KEY (
 ALTER TABLE cluster_version ADD CONSTRAINT FK_cluster_version_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE cluster_version ADD CONSTRAINT FK_cluster_version_repovers_id FOREIGN KEY (repo_version_id) REFERENCES repo_version (repo_version_id);
 ALTER TABLE hostcomponentdesiredstate ADD CONSTRAINT FK_hcdesiredstate_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
-ALTER TABLE hostcomponentdesiredstate ADD CONSTRAINT hstcmpnntdesiredstatecmpnntnme FOREIGN KEY (component_name, cluster_id, service_name) REFERENCES servicecomponentdesiredstate (component_name, cluster_id, service_name);
-ALTER TABLE hostcomponentstate ADD CONSTRAINT hstcomponentstatecomponentname FOREIGN KEY (component_name, cluster_id, service_name) REFERENCES servicecomponentdesiredstate (component_name, cluster_id, service_name);
+ALTER TABLE hostcomponentdesiredstate ADD CONSTRAINT hstcmpnntdesiredstatecmpnntnme FOREIGN KEY (component_name, service_name, cluster_id) REFERENCES servicecomponentdesiredstate (component_name, service_name, cluster_id);
+ALTER TABLE hostcomponentstate ADD CONSTRAINT hstcomponentstatecomponentname FOREIGN KEY (component_name, service_name, cluster_id) REFERENCES servicecomponentdesiredstate (component_name, service_name, cluster_id);
 ALTER TABLE hostcomponentstate ADD CONSTRAINT FK_hostcomponentstate_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
 ALTER TABLE hoststate ADD CONSTRAINT FK_hoststate_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
 ALTER TABLE host_version ADD CONSTRAINT FK_host_version_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
@@ -951,6 +950,19 @@ CREATE TABLE upgrade_item (
   FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
 );
 
+CREATE TABLE servicecomponent_history(
+  id NUMERIC(19) NOT NULL,
+  component_id NUMERIC(19) NOT NULL,
+  upgrade_id NUMERIC(19) NOT NULL,
+  from_stack_id NUMERIC(19) NOT NULL,
+  to_stack_id NUMERIC(19) NOT NULL,
+  CONSTRAINT PK_sc_history PRIMARY KEY (id),
+  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id),
+  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id),
+  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id),
+  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)
+);
+
 -- In order for the first ID to be 1, must initialize the ambari_sequences table with a sequence_value of 0.
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('cluster_id_seq', 1);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('host_id_seq', 0);
@@ -997,6 +1009,7 @@ INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('topology_ho
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('setting_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('hostcomponentstate_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('servicecomponentdesiredstate_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('servicecomponent_history_id_seq', 0);
 
 insert into adminresourcetype (resource_type_id, resource_type_name)
   select 1, 'AMBARI'
@@ -1268,7 +1281,7 @@ insert into adminpermission(permission_id, permission_name, resource_type_id, pe
     SELECT permission_id, 'AMBARI.ASSIGN_ROLES' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
     SELECT permission_id, 'AMBARI.MANAGE_STACK_VERSIONS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
     SELECT permission_id, 'AMBARI.EDIT_STACK_REPOS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR';
-  
+
 insert into adminprivilege (privilege_id, permission_id, resource_id, principal_id)
   select 1, 1, 1, 1;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e4d1475e/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
index 47bde49..80feca5 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
@@ -188,12 +188,10 @@ CREATE TABLE servicecomponentdesiredstate (
   desired_state VARCHAR(255) NOT NULL,
   service_name VARCHAR(255) NOT NULL,
   PRIMARY KEY CLUSTERED (id),
-  CONSTRAINT pk_servicecomponentdesiredstate PRIMARY KEY (id),
-  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name,service_name,cluster_id)
+  CONSTRAINT pk_sc_desiredstate PRIMARY KEY (id),
+  CONSTRAINT unq_scdesiredstate_name UNIQUE(component_name, service_name, cluster_id)
 );
 
-CREATE NONCLUSTERED INDEX idx_sc_desired_state ON servicecomponentdesiredstate(component_name, service_name, cluster_id);
-
 CREATE TABLE servicedesiredstate (
   cluster_id BIGINT NOT NULL,
   desired_host_role_mapping INTEGER NOT NULL,
@@ -329,7 +327,7 @@ CREATE TABLE requestoperationlevel (
   host_id BIGINT NULL,      -- unlike most host_id columns, this one allows NULLs because the request can be at the service level
   PRIMARY KEY CLUSTERED (operation_level_id)
   );
-  
+
 CREATE TABLE ClusterHostMapping (
   cluster_id BIGINT NOT NULL,
   host_id BIGINT NOT NULL,
@@ -814,8 +812,8 @@ ALTER TABLE clusterstate ADD CONSTRAINT FK_clusterstate_cluster_id FOREIGN KEY (
 ALTER TABLE cluster_version ADD CONSTRAINT FK_cluster_version_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE cluster_version ADD CONSTRAINT FK_cluster_version_repovers_id FOREIGN KEY (repo_version_id) REFERENCES repo_version (repo_version_id);
 ALTER TABLE hostcomponentdesiredstate ADD CONSTRAINT hstcmponentdesiredstatehstid FOREIGN KEY (host_id) REFERENCES hosts (host_id);
-ALTER TABLE hostcomponentdesiredstate ADD CONSTRAINT hstcmpnntdesiredstatecmpnntnme FOREIGN KEY (component_name, cluster_id, service_name) REFERENCES servicecomponentdesiredstate (component_name, cluster_id, service_name);
-ALTER TABLE hostcomponentstate ADD CONSTRAINT hstcomponentstatecomponentname FOREIGN KEY (component_name, cluster_id, service_name) REFERENCES servicecomponentdesiredstate (component_name, cluster_id, service_name);
+ALTER TABLE hostcomponentdesiredstate ADD CONSTRAINT hstcmpnntdesiredstatecmpnntnme FOREIGN KEY (component_name, service_name, cluster_id) REFERENCES servicecomponentdesiredstate (component_name, service_name, cluster_id);
+ALTER TABLE hostcomponentstate ADD CONSTRAINT hstcomponentstatecomponentname FOREIGN KEY (component_name, service_name, cluster_id) REFERENCES servicecomponentdesiredstate (component_name, service_name, cluster_id);
 ALTER TABLE hostcomponentstate ADD CONSTRAINT FK_hostcomponentstate_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
 ALTER TABLE hoststate ADD CONSTRAINT FK_hoststate_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
 ALTER TABLE host_version ADD CONSTRAINT FK_host_version_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id);
@@ -1065,6 +1063,19 @@ CREATE TABLE upgrade_item (
   FOREIGN KEY (upgrade_group_id) REFERENCES upgrade_group(upgrade_group_id)
 );
 
+CREATE TABLE servicecomponent_history(
+  id BIGINT NOT NULL,
+  component_id BIGINT NOT NULL,
+  upgrade_id BIGINT NOT NULL,
+  from_stack_id BIGINT NOT NULL,
+  to_stack_id BIGINT NOT NULL,
+  CONSTRAINT PK_sc_history PRIMARY KEY (id),
+  CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id),
+  CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id),
+  CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id),
+  CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)
+);
+
 ---------inserting some data-----------
 BEGIN TRANSACTION
   INSERT INTO ambari_sequences (sequence_name, [sequence_value])
@@ -1113,7 +1124,8 @@ BEGIN TRANSACTION
     ('topology_host_group_id_seq', 0),
     ('setting_id_seq', 0),
     ('hostcomponentstate_id_seq', 0),
-    ('servicecomponentdesiredstate_id_seq', 0);
+    ('servicecomponentdesiredstate_id_seq', 0),
+    ('servicecomponent_history_id_seq', 0);
 
   insert into adminresourcetype (resource_type_id, resource_type_name)
   values
@@ -1733,4 +1745,3 @@ BEGIN
 END')
 
 GO
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/e4d1475e/ambari-server/src/main/resources/META-INF/persistence.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/META-INF/persistence.xml b/ambari-server/src/main/resources/META-INF/persistence.xml
index 7fd0391..513035f 100644
--- a/ambari-server/src/main/resources/META-INF/persistence.xml
+++ b/ambari-server/src/main/resources/META-INF/persistence.xml
@@ -63,6 +63,7 @@
     <class>org.apache.ambari.server.orm.entities.ResourceTypeEntity</class>
     <class>org.apache.ambari.server.orm.entities.RoleSuccessCriteriaEntity</class>
     <class>org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity</class>
+    <class>org.apache.ambari.server.orm.entities.ServiceComponentHistoryEntity</class>
     <class>org.apache.ambari.server.orm.entities.ServiceConfigEntity</class>
     <class>org.apache.ambari.server.orm.entities.ServiceDesiredStateEntity</class>
     <class>org.apache.ambari.server.orm.entities.StackEntity</class>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e4d1475e/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
index 4e8713b..55e7a61 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/ServiceComponentTest.java
@@ -18,10 +18,12 @@
 
 package org.apache.ambari.server.state;
 
+import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.fail;
 
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 
 import org.apache.ambari.server.AmbariException;
@@ -34,11 +36,16 @@ import org.apache.ambari.server.orm.dao.HostComponentDesiredStateDAO;
 import org.apache.ambari.server.orm.dao.HostComponentStateDAO;
 import org.apache.ambari.server.orm.dao.HostDAO;
 import org.apache.ambari.server.orm.dao.ServiceComponentDesiredStateDAO;
+import org.apache.ambari.server.orm.dao.UpgradeDAO;
 import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntity;
 import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntityPK;
 import org.apache.ambari.server.orm.entities.HostComponentStateEntity;
 import org.apache.ambari.server.orm.entities.HostEntity;
 import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
+import org.apache.ambari.server.orm.entities.ServiceComponentHistoryEntity;
+import org.apache.ambari.server.orm.entities.UpgradeEntity;
+import org.apache.ambari.server.state.stack.upgrade.Direction;
+import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -350,4 +357,137 @@ public class ServiceComponentTest {
       }
     }
   }
+
+  @Test
+  public void testHistoryCreation() throws AmbariException {
+    ServiceComponentDesiredStateDAO serviceComponentDesiredStateDAO = injector.getInstance(
+        ServiceComponentDesiredStateDAO.class);
+
+    String componentName = "NAMENODE";
+    ServiceComponent component = serviceComponentFactory.createNew(service, componentName);
+    service.addServiceComponent(component);
+    component.persist();
+
+    ServiceComponent sc = service.getServiceComponent(componentName);
+    Assert.assertNotNull(sc);
+
+    sc.setDesiredState(State.INSTALLED);
+    Assert.assertEquals(State.INSTALLED, sc.getDesiredState());
+
+    sc.setDesiredStackVersion(new StackId("HDP-2.2.0"));
+    StackId stackId = sc.getDesiredStackVersion();
+    Assert.assertEquals(new StackId("HDP", "2.2.0"), stackId);
+
+    Assert.assertEquals("HDP-2.2.0", sc.getDesiredStackVersion().getStackId());
+
+    ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntity = serviceComponentDesiredStateDAO.findByName(
+        cluster.getClusterId(), serviceName, componentName);
+
+    Assert.assertNotNull(serviceComponentDesiredStateEntity);
+
+    UpgradeEntity upgradeEntity = createUpgradeEntity("2.2.0.0", "2.2.0.1");
+    ServiceComponentHistoryEntity history = new ServiceComponentHistoryEntity();
+    history.setFromStack(serviceComponentDesiredStateEntity.getDesiredStack());
+    history.setToStack(serviceComponentDesiredStateEntity.getDesiredStack());
+    history.setUpgrade(upgradeEntity);
+    history.setServiceComponentDesiredState(serviceComponentDesiredStateEntity);
+    history = serviceComponentDesiredStateDAO.merge(history);
+
+    serviceComponentDesiredStateEntity = serviceComponentDesiredStateDAO.findByName(
+        cluster.getClusterId(), serviceName, componentName);
+
+    Assert.assertEquals(history, serviceComponentDesiredStateEntity.getHistory().iterator().next());
+  }
+
+  /**
+   * Tests that removing a service component also removes its history via the
+   * CASCADE on the relationship.
+   *
+   * @throws AmbariException
+   */
+  @Test
+  public void testHistoryRemoval() throws AmbariException {
+    ServiceComponentDesiredStateDAO serviceComponentDesiredStateDAO = injector.getInstance(
+        ServiceComponentDesiredStateDAO.class);
+
+    String componentName = "NAMENODE";
+    ServiceComponent component = serviceComponentFactory.createNew(service, componentName);
+    service.addServiceComponent(component);
+    component.persist();
+
+    ServiceComponent sc = service.getServiceComponent(componentName);
+    Assert.assertNotNull(sc);
+
+    sc.setDesiredState(State.INSTALLED);
+    Assert.assertEquals(State.INSTALLED, sc.getDesiredState());
+
+    sc.setDesiredStackVersion(new StackId("HDP-2.2.0"));
+    StackId stackId = sc.getDesiredStackVersion();
+    Assert.assertEquals(new StackId("HDP", "2.2.0"), stackId);
+
+    Assert.assertEquals("HDP-2.2.0", sc.getDesiredStackVersion().getStackId());
+
+    ServiceComponentDesiredStateEntity serviceComponentDesiredStateEntity = serviceComponentDesiredStateDAO.findByName(
+        cluster.getClusterId(), serviceName, componentName);
+
+    Assert.assertNotNull(serviceComponentDesiredStateEntity);
+
+    UpgradeEntity upgradeEntity = createUpgradeEntity("2.2.0.0", "2.2.0.1");
+    ServiceComponentHistoryEntity history = new ServiceComponentHistoryEntity();
+    history.setFromStack(serviceComponentDesiredStateEntity.getDesiredStack());
+    history.setToStack(serviceComponentDesiredStateEntity.getDesiredStack());
+    history.setUpgrade(upgradeEntity);
+    history.setServiceComponentDesiredState(serviceComponentDesiredStateEntity);
+    history = serviceComponentDesiredStateDAO.merge(history);
+
+    serviceComponentDesiredStateEntity = serviceComponentDesiredStateDAO.findByName(
+        cluster.getClusterId(), serviceName, componentName);
+
+    Assert.assertEquals(history, serviceComponentDesiredStateEntity.getHistory().iterator().next());
+
+    // verify that we can retrieve the history directly
+    List<ServiceComponentHistoryEntity> componentHistoryList = serviceComponentDesiredStateDAO.findHistory(
+        sc.getClusterId(), sc.getServiceName(), sc.getName());
+
+    assertEquals(1, componentHistoryList.size());
+
+    // delete the SC
+    sc.delete();
+
+    // verify history is gone, too
+    serviceComponentDesiredStateEntity = serviceComponentDesiredStateDAO.findByName(
+        cluster.getClusterId(), serviceName, componentName);
+
+    Assert.assertNull(serviceComponentDesiredStateEntity);
+
+    // verify that we cannot retrieve the history directly
+    componentHistoryList = serviceComponentDesiredStateDAO.findHistory(sc.getClusterId(),
+        sc.getServiceName(), sc.getName());
+
+    assertEquals(0, componentHistoryList.size());
+  }
+
+  /**
+   * Creates an upgrade entity, asserting it was created correctly.
+   *
+   * @param fromVersion the version the upgrade starts from.
+   * @param toVersion the version the upgrade moves to.
+   * @return the persisted upgrade entity.
+   */
+  private UpgradeEntity createUpgradeEntity(String fromVersion, String toVersion) {
+    UpgradeDAO upgradeDao = injector.getInstance(UpgradeDAO.class);
+    UpgradeEntity upgradeEntity = new UpgradeEntity();
+    upgradeEntity.setClusterId(cluster.getClusterId());
+    upgradeEntity.setDirection(Direction.UPGRADE);
+    upgradeEntity.setFromVersion(fromVersion);
+    upgradeEntity.setToVersion(toVersion);
+    upgradeEntity.setUpgradePackage("upgrade_test");
+    upgradeEntity.setUpgradeType(UpgradeType.ROLLING);
+    upgradeEntity.setRequestId(1L);
+
+    upgradeDao.create(upgradeEntity);
+    List<UpgradeEntity> upgrades = upgradeDao.findUpgrades(cluster.getClusterId());
+    assertEquals(1, upgrades.size());
+    return upgradeEntity;
+  }
 }


[09/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/73aee31e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/73aee31e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/73aee31e

Branch: refs/heads/trunk
Commit: 73aee31ef67348bbfc942bdf719c6952964a82ab
Parents: e7bdb7b 3ab6a3a
Author: Nate Cole <nc...@hortonworks.com>
Authored: Mon Feb 1 10:41:26 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Mon Feb 1 10:41:26 2016 -0500

----------------------------------------------------------------------
 .../main/resources/ui/admin-web/app/index.html  |   1 +
 .../controllers/clusters/UserAccessListCtrl.js  |   2 +-
 .../loginActivities/LoginMessageMainCtrl.js     |  67 ++
 .../ui/admin-web/app/scripts/i18n.config.js     |   6 +-
 .../resources/ui/admin-web/app/styles/main.css  |   6 +
 .../app/views/clusters/userAccessList.html      |   4 +-
 .../app/views/loginActivities/loginMessage.html |  50 +-
 ambari-agent/conf/unix/ambari-agent             |  13 +
 ambari-agent/etc/init/ambari-agent.conf         |  34 +
 ambari-agent/pom.xml                            | 479 +---------
 ambari-agent/src/packages/tarball/all.xml       | 168 +++-
 .../resource_management/core/global_lock.py     |  46 +
 .../libraries/functions/curl_krb_request.py     |  47 +-
 .../libraries/functions/hive_check.py           |  23 +-
 .../sink/timeline/MetadataException.java        |  28 +
 .../sink/timeline/TimelineMetricMetadata.java   | 163 ++++
 .../timeline/HBaseTimelineMetricStore.java      |  73 +-
 .../metrics/timeline/PhoenixHBaseAccessor.java  | 253 +++++-
 .../timeline/TimelineMetricConfiguration.java   |   9 +
 .../metrics/timeline/TimelineMetricStore.java   |  22 +-
 .../TimelineMetricAggregatorFactory.java        |   5 +-
 .../TimelineMetricAppAggregator.java            |  28 +-
 .../TimelineMetricClusterAggregatorSecond.java  |   5 +-
 .../discovery/TimelineMetricMetadataKey.java    |  56 ++
 .../TimelineMetricMetadataManager.java          | 187 ++++
 .../discovery/TimelineMetricMetadataSync.java   | 105 +++
 .../timeline/query/PhoenixTransactSQL.java      |  37 +-
 .../webapp/TimelineWebServices.java             |  59 +-
 .../TestApplicationHistoryServer.java           |  11 +-
 .../timeline/AbstractMiniHBaseClusterTest.java  |   8 +-
 .../timeline/ITPhoenixHBaseAccessor.java        |   9 +-
 .../timeline/TestTimelineMetricStore.java       |  14 +
 .../aggregators/ITClusterAggregator.java        |  16 +-
 .../timeline/discovery/TestMetadataManager.java | 112 +++
 ambari-server/pom.xml                           | 883 +------------------
 ambari-server/src/main/assemblies/server.xml    | 348 ++++++--
 .../internal/AdminSettingResourceProvider.java  |   5 +
 .../AlertDefinitionResourceProvider.java        |   4 +-
 .../internal/AlertResourceProvider.java         |   6 +-
 .../AmbariAuthorizationFilter.java              |   3 +-
 .../server/upgrade/UpgradeCatalog210.java       |   4 +-
 .../server/upgrade/UpgradeCatalog221.java       |   8 +
 .../server/upgrade/UpgradeCatalog222.java       |  15 +
 .../HAWQ/2.0.0/configuration/hawq-site.xml      |  14 +
 .../package/alerts/alert_hive_metastore.py      |  15 +-
 .../package/alerts/alert_webhcat_server.py      |   6 -
 .../package/alerts/alert_check_oozie_server.py  |  15 +-
 .../OOZIE/4.0.0.2.0/package/scripts/oozie.py    |   2 +-
 .../SPARK/1.2.0.2.2/package/scripts/params.py   |   2 +-
 .../YARN/2.1.0.2.0/kerberos.json                |   4 +-
 .../services/HIVE/configuration/hive-site.xml   |   2 -
 .../stacks/HDP/2.2/services/YARN/kerberos.json  |   4 +-
 .../HDP/2.3/services/ACCUMULO/widgets.json      |   4 +-
 .../stacks/HDP/2.3/services/YARN/kerberos.json  |   4 +-
 .../stacks/HDP/2.3/services/stack_advisor.py    |   9 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml |   5 +
 .../stacks/HDP/2.3/upgrades/upgrade-2.4.xml     |  80 +-
 .../SPARK/configuration/spark-defaults.xml      |   4 +-
 .../configuration/spark-thrift-sparkconf.xml    |  31 +-
 .../stacks/HDP/2.4/upgrades/config-upgrade.xml  |  26 +-
 .../AdminSettingResourceProviderTest.java       |  15 +-
 .../server/orm/dao/AdminSettingDAOTest.java     |   4 +
 .../ambari/server/orm/dao/AlertsDAOTest.java    |  38 +
 .../server/upgrade/UpgradeCatalog221Test.java   |  47 +
 .../server/upgrade/UpgradeCatalog222Test.java   |   6 +-
 ambari-server/src/test/python/TestGlobalLock.py |  55 ++
 .../stacks/2.0.6/OOZIE/test_oozie_client.py     |   6 +-
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     | 117 ++-
 .../stacks/2.0.6/OOZIE/test_service_check.py    |   4 -
 .../common/services-normal-hawq-3-hosts.json    |  11 +
 .../stacks/2.3/common/test_stack_advisor.py     |  34 +
 ambari-server/src/test/python/unitTests.py      |  14 +-
 .../main/admin/stack_and_upgrade_controller.js  |  22 +-
 .../main/alert_definitions_controller.js        |  41 +
 .../controllers/main/host/combo_search_box.js   |  30 +-
 ambari-web/app/controllers/main/service/item.js |   2 +-
 .../app/controllers/wizard/step0_controller.js  |   2 +-
 .../app/controllers/wizard/step3_controller.js  |   3 +-
 .../app/controllers/wizard/step7_controller.js  |   2 +-
 .../mappers/alert_definition_summary_mapper.js  |   8 +-
 ambari-web/app/messages.js                      |   2 +
 .../main/service/configs/config_overridable.js  |   3 +-
 .../app/models/alerts/alert_definition.js       |   1 +
 ambari-web/app/models/host_component.js         |   3 +-
 ambari-web/app/models/service.js                |  43 +
 .../stack_upgrade/stack_upgrade_wizard.hbs      |   8 +-
 ambari-web/app/templates/main/alerts.hbs        |  32 +-
 .../alert_definition/alert_definition_state.hbs |  31 +
 .../alert_definition_summary.hbs                |  28 +
 ambari-web/app/utils/ajax/ajax.js               |  19 +
 ambari-web/app/utils/ember_reopen.js            |  14 +
 ambari-web/app/utils/validator.js               |   2 +-
 ambari-web/app/views.js                         |   2 +
 .../app/views/common/quick_view_link_view.js    |   3 +
 ambari-web/app/views/common/sort_view.js        |  19 +-
 ambari-web/app/views/common/table_view.js       |  24 +-
 .../app/views/main/alert_definitions_view.js    |  23 +-
 .../alert_definition/alert_definition_state.js  |  34 +
 .../alert_definition_summary.js                 |  65 ++
 .../views/main/dashboard/config_history_view.js |  12 +
 .../app/views/main/host/combo_search_box.js     |   1 +
 .../test/controllers/main/service/item_test.js  |  13 +-
 ambari-web/test/utils/validator_test.js         |   1 +
 .../test/views/common/quick_link_view_test.js   |   5 +
 ambari-web/vendor/scripts/visualsearch.js       |   2 +-
 105 files changed, 2726 insertions(+), 1774 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/73aee31e/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
----------------------------------------------------------------------


[37/50] [abbrv] ambari git commit: Revert "Merge with trunk"

Posted by nc...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/4d3839c7/ambari-web/test/views/main/dashboard/widgets/hawqsegment_live_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/dashboard/widgets/hawqsegment_live_test.js b/ambari-web/test/views/main/dashboard/widgets/hawqsegment_live_test.js
deleted file mode 100644
index eb8a505..0000000
--- a/ambari-web/test/views/main/dashboard/widgets/hawqsegment_live_test.js
+++ /dev/null
@@ -1,69 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-var App = require('app');
-
-require('views/main/dashboard/widget');
-require('views/main/dashboard/widgets/text_widget');
-require('views/main/dashboard/widgets/hawqsegment_live');
-
-describe('App.HawqSegmentUpView', function() {
-
-  var tests = [
-    {
-      data: 100,
-      e: {
-        isRed: false,
-        isOrange: false,
-        isGreen: true
-      }
-    },
-    {
-      data: 0,
-      e: {
-        isRed: true,
-        isOrange: false,
-        isGreen: false
-      }
-    },
-    {
-      data: 50,
-      e: {
-        isRed: false,
-        isOrange: true,
-        isGreen: false
-      }
-    }
-  ];
-
-  tests.forEach(function(test) {
-    describe('', function() {
-      var hawqSegmentUpView = App.HawqSegmentUpView.create({model_type:null, data: test.data, content: test.data.toString()});
-      it('shows red', function() {
-        expect(hawqSegmentUpView.get('isRed')).to.equal(test.e.isRed);
-      });
-      it('shows orange', function() {
-        expect(hawqSegmentUpView.get('isOrange')).to.equal(test.e.isOrange);
-      });
-      it('shows green', function() {
-        expect(hawqSegmentUpView.get('isGreen')).to.equal(test.e.isGreen);
-      });
-    });
-  });
-
-});


[50/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/db999ae8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/db999ae8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/db999ae8

Branch: refs/heads/trunk
Commit: db999ae82c148dd69340fb40efff4d0b1c06fa04
Parents: a5b673d 6e002b2
Author: Nate Cole <nc...@hortonworks.com>
Authored: Wed Feb 24 09:23:53 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Wed Feb 24 09:23:53 2016 -0500

----------------------------------------------------------------------
 .../resources/ui/admin-web/app/scripts/app.js   |   3 +-
 .../app/scripts/controllers/NavbarCtrl.js       |   3 +-
 .../authentication/AuthenticationMainCtrl.js    |   3 +-
 .../ui/admin-web/app/scripts/routes.js          |   7 +-
 .../app/views/authentication/main.html          |   2 +-
 .../ui/admin-web/app/views/leftNavbar.html      |   2 +-
 .../pluggable_stack_definition/configs/PHD.json | 167 ++++++++++++++++++-
 .../conf/unix/metric_monitor.ini                |   7 +-
 .../src/main/python/core/config_reader.py       |  36 +++-
 .../src/main/python/core/controller.py          |   1 -
 .../src/main/python/core/emitter.py             |  34 ++--
 .../src/main/python/core/security.py            |  98 +++++++++++
 .../src/test/python/core/TestEmitter.py         |  65 ++++----
 .../AMBARI_METRICS/0.1.0/package/scripts/ams.py |  40 ++++-
 .../0.1.0/package/scripts/functions.py          |   4 -
 .../package/scripts/metrics_grafana_util.py     |  58 +++++--
 .../0.1.0/package/scripts/network.py            |  39 +++++
 .../0.1.0/package/scripts/params.py             |   4 +-
 .../0.1.0/package/scripts/service_check.py      |  24 ++-
 .../package/templates/metric_monitor.ini.j2     |   5 +-
 .../metrics_grafana_datasource.json.j2          |   2 +-
 .../AMBARI_METRICS/test_metrics_collector.py    |  10 ++
 .../AMBARI_METRICS/test_metrics_grafana.py      |  12 +-
 .../python/stacks/2.0.6/configs/default.json    |  10 ++
 ambari-server/src/test/python/unitTests.py      |   8 +
 ambari-web/app/router.js                        |   5 +-
 ambari-web/app/styles/application.less          |  15 +-
 .../app/styles/enhanced_service_dashboard.less  |   8 +-
 .../app/templates/common/chart/linear_time.hbs  |   4 +-
 .../app/templates/main/charts/linear_time.hbs   |   1 +
 .../app/views/common/chart/linear_time.js       |  48 +++++-
 .../views/common/widget/graph_widget_view.js    |  30 ++++
 .../dashboard/widgets/cluster_metrics_widget.js |   2 +
 .../test/views/common/chart/linear_time_test.js |  90 +++++++---
 34 files changed, 709 insertions(+), 138 deletions(-)
----------------------------------------------------------------------



[46/50] [abbrv] ambari git commit: AMBARI-14996. Component should support a desired version (dlysnichenko)

Posted by nc...@apache.org.
AMBARI-14996. Component should support a desired version (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/cbef0c14
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/cbef0c14
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/cbef0c14

Branch: refs/heads/trunk
Commit: cbef0c14682ae24f2754063c48ce521c58cdca27
Parents: 50547c5
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Tue Feb 23 17:17:27 2016 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Tue Feb 23 17:18:57 2016 +0200

----------------------------------------------------------------------
 .gitignore                                      |   3 +
 .../ambari/server/agent/HeartBeatHandler.java   |  47 +-----
 .../AmbariCustomCommandExecutionHelper.java     |   2 +-
 .../RepositoryVersionResourceProvider.java      |   4 +-
 .../internal/UpgradeResourceProvider.java       |  37 ++++-
 .../HostComponentVersionAdvertisedEvent.java    |  73 +++++++++
 .../events/HostComponentVersionEvent.java       |  65 --------
 .../listeners/upgrade/StackVersionListener.java |  99 +++++++++++-
 .../publishers/VersionEventPublisher.java       |   8 +-
 .../ambari/server/orm/dao/UpgradeDAO.java       |   8 +-
 .../server/orm/entities/ClusterEntity.java      |  15 ++
 .../orm/entities/HostComponentStateEntity.java  |   5 +-
 .../ServiceComponentDesiredStateEntity.java     |  15 ++
 .../upgrades/FinalizeUpgradeAction.java         |  38 ++---
 .../ambari/server/stack/MasterHostResolver.java |   3 +-
 .../org/apache/ambari/server/state/Cluster.java |  15 ++
 .../server/state/RepositoryVersionState.java    |  39 +----
 .../ambari/server/state/ServiceComponent.java   |   4 +
 .../server/state/ServiceComponentHost.java      |  83 +++++-----
 .../server/state/ServiceComponentImpl.java      |  28 ++++
 .../ambari/server/state/UpgradeHelper.java      |  22 +++
 .../ambari/server/state/UpgradeState.java       |  22 ++-
 .../server/state/cluster/ClusterImpl.java       | 135 ++++++++--------
 .../svccomphost/ServiceComponentHostImpl.java   |   7 +-
 .../ServiceComponentHostSummary.java            |  53 +++----
 .../main/resources/Ambari-DDL-Derby-CREATE.sql  | 109 ++++++-------
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  | 108 ++++++-------
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql | 109 ++++++-------
 .../resources/Ambari-DDL-Postgres-CREATE.sql    | 109 ++++++-------
 .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql     | 117 +++++++-------
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql | 105 +++++++------
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   | 109 ++++++-------
 .../ambari/server/StateRecoveryManagerTest.java |   6 -
 .../server/agent/TestHeartbeatHandler.java      |   2 +-
 .../server/agent/TestHeartbeatMonitor.java      |  10 +-
 .../AmbariManagementControllerTest.java         |  18 +--
 .../StackDefinedPropertyProviderTest.java       |   2 +-
 .../UpgradeResourceProviderHDP22Test.java       |   2 +-
 .../internal/UpgradeResourceProviderTest.java   |   2 +-
 .../apache/ambari/server/events/EventsTest.java |   2 +-
 .../HostVersionOutOfSyncListenerTest.java       |  10 +-
 .../upgrade/StackVersionListenerTest.java       |  25 ++-
 .../publishers/VersionEventPublisherTest.java   |  10 +-
 .../apache/ambari/server/orm/OrmTestHelper.java |   2 +-
 .../server/orm/dao/ClusterVersionDAOTest.java   |  34 ++--
 .../server/orm/dao/HostVersionDAOTest.java      |  16 +-
 .../ambari/server/orm/dao/UpgradeDAOTest.java   |   2 +-
 .../ComponentVersionCheckActionTest.java        |  18 +--
 .../upgrades/ConfigureActionTest.java           |   6 +-
 .../upgrades/UpgradeActionTest.java             |  36 ++---
 .../server/state/ServiceComponentTest.java      |   2 +-
 .../ambari/server/state/UpgradeHelperTest.java  |   8 +-
 .../state/cluster/ClusterDeadlockTest.java      |   2 +-
 .../server/state/cluster/ClusterTest.java       | 155 +------------------
 .../state/cluster/ClustersDeadlockTest.java     |   2 +-
 .../server/state/cluster/ClustersTest.java      |  10 +-
 .../ConcurrentServiceConfigVersionTest.java     |   2 +-
 ...omponentHostConcurrentWriteDeadlockTest.java |   2 +-
 .../ambari/server/state/host/HostTest.java      |   4 +-
 .../svccomphost/ServiceComponentHostTest.java   |  10 +-
 60 files changed, 1039 insertions(+), 957 deletions(-)
----------------------------------------------------------------------
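
For orientation, the sketch below shows how the per-component desired version introduced by
this commit is meant to be read and written through the ServiceComponent API added in the
diffs that follow. It is an illustration only: the service/component names and the version
string are placeholders, and exception handling is omitted.

    // Hypothetical usage sketch, not an excerpt from the commit.
    ServiceComponent sc = cluster.getService("HDFS").getServiceComponent("DATANODE");
    if (State.UNKNOWN.toString().equals(sc.getDesiredVersion())) {
      // UNKNOWN is the sentinel default; adopt the first advertised version as desired.
      sc.setDesiredVersion("2.3.4.0-3485");
    }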


http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/.gitignore
----------------------------------------------------------------------
diff --git a/.gitignore b/.gitignore
index a8c4aab..d9f23de 100644
--- a/.gitignore
+++ b/.gitignore
@@ -21,3 +21,6 @@ pass.txt
 ambari-agent/src/test/python/ambari_agent/dummy_files/current-stack
 velocity.log*
 ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/build/
+rebel.xml
+rebel-remote.xml
+out

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
index 248ce4b..a4136ee 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
@@ -47,7 +47,7 @@ import org.apache.ambari.server.controller.MaintenanceStateHelper;
 import org.apache.ambari.server.events.ActionFinalReportReceivedEvent;
 import org.apache.ambari.server.events.AlertEvent;
 import org.apache.ambari.server.events.AlertReceivedEvent;
-import org.apache.ambari.server.events.HostComponentVersionEvent;
+import org.apache.ambari.server.events.HostComponentVersionAdvertisedEvent;
 import org.apache.ambari.server.events.publishers.AlertEventPublisher;
 import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
 import org.apache.ambari.server.events.publishers.VersionEventPublisher;
@@ -562,10 +562,8 @@ public class HeartBeatHandler {
 
               String newVersion = structuredOutput == null ? null : structuredOutput.version;
 
-              // Pass true to always publish a version event.  It is safer to recalculate the version even if we don't
-              // detect a difference in the value.  This is useful in case that a manual database edit is done while
-              // ambari-server is stopped.
-              handleComponentVersionReceived(cl, scHost, newVersion, true);
+              HostComponentVersionAdvertisedEvent event = new HostComponentVersionAdvertisedEvent(cl, scHost, newVersion);
+              versionEventPublisher.publish(event);
             }
 
             // Updating stack version, if needed (this is not actually for express/rolling upgrades!)
@@ -618,7 +616,8 @@ public class HeartBeatHandler {
               try {
                 ComponentVersionStructuredOut structuredOutput = gson.fromJson(report.getStructuredOut(), ComponentVersionStructuredOut.class);
 
-                if (null != structuredOutput.upgradeDirection && structuredOutput.upgradeDirection.isUpgrade()) {
+                if (null != structuredOutput.upgradeDirection) {
+                  // TODO: backward compatibility: now state is set to FAILED also during downgrade
                   scHost.setUpgradeState(UpgradeState.FAILED);
                 }
               } catch (JsonSyntaxException ex) {
@@ -726,7 +725,8 @@ public class HeartBeatHandler {
                   if (extra.containsKey("version")) {
                     String version = extra.get("version").toString();
 
-                    handleComponentVersionReceived(cl, scHost, version, false);
+                    HostComponentVersionAdvertisedEvent event = new HostComponentVersionAdvertisedEvent(cl, scHost, version);
+                    versionEventPublisher.publish(event);
                   }
 
                 } catch (Exception e) {
@@ -782,39 +782,6 @@ public class HeartBeatHandler {
   }
 
   /**
-   * Updates the version of the given service component, sets the upgrade state (if needed)
-   * and publishes a version event through the version event publisher.
-   *
-   * @param cluster        the cluster
-   * @param scHost         service component host
-   * @param newVersion     new version of service component
-   * @param alwaysPublish  if true, always publish a version event; if false,
-   *                       only publish if the component version was updated
-   */
-  private void handleComponentVersionReceived(Cluster cluster, ServiceComponentHost scHost,
-                                              String newVersion, boolean alwaysPublish) {
-
-    boolean updated = false;
-
-    if (StringUtils.isNotBlank(newVersion)) {
-      final String previousVersion = scHost.getVersion();
-      if (!StringUtils.equals(previousVersion, newVersion)) {
-        scHost.setVersion(newVersion);
-        scHost.setStackVersion(cluster.getDesiredStackVersion());
-        if (previousVersion != null && !previousVersion.equalsIgnoreCase(State.UNKNOWN.toString())) {
-          scHost.setUpgradeState(UpgradeState.COMPLETE);
-        }
-        updated = true;
-      }
-    }
-
-    if (updated || alwaysPublish) {
-      HostComponentVersionEvent event = new HostComponentVersionEvent(cluster, scHost);
-      versionEventPublisher.publish(event);
-    }
-  }
-
-  /**
    * Adds commands from action queue to a heartbeat response.
    */
   protected void sendCommands(String hostname, HeartBeatResponse response)

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
index b62f4d1..24728bf 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
@@ -1152,7 +1152,7 @@ public class AmbariCustomCommandExecutionHelper {
     ClusterVersionEntity clusterVersionEntity = clusterVersionDAO.findByClusterAndStateCurrent(cluster.getClusterName());
     if (clusterVersionEntity == null) {
       List<ClusterVersionEntity> clusterVersionEntityList = clusterVersionDAO
-              .findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.UPGRADING);
+              .findByClusterAndState(cluster.getClusterName(), RepositoryVersionState.INSTALLING);
       if (!clusterVersionEntityList.isEmpty()) {
         clusterVersionEntity = clusterVersionEntityList.iterator().next();
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
index 87731c4..c298e0a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryVersionResourceProvider.java
@@ -403,9 +403,7 @@ public class RepositoryVersionResourceProvider extends AbstractAuthorizedResourc
       final List<RepositoryVersionState> forbiddenToDeleteStates = Lists.newArrayList(
           RepositoryVersionState.CURRENT,
           RepositoryVersionState.INSTALLED,
-          RepositoryVersionState.INSTALLING,
-          RepositoryVersionState.UPGRADED,
-          RepositoryVersionState.UPGRADING);
+          RepositoryVersionState.INSTALLING);
       for (ClusterVersionEntity clusterVersionEntity : clusterVersionEntities) {
         if (clusterVersionEntity.getRepositoryVersion().getId().equals(id) && forbiddenToDeleteStates.contains(clusterVersionEntity.getState())) {
           throw new SystemException("Repository version can't be deleted as it is " +

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
index 0190014..08665dd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
@@ -84,6 +84,9 @@ import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.DesiredConfig;
+import org.apache.ambari.server.state.RepositoryType;
+import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
@@ -813,6 +816,20 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
       configUpgradePack = ConfigUpgradePack.merge(intermediateConfigUpgradePacks);
     }
 
+    // TODO: for now, all service components are transitioned to upgrading state
+    // TODO: When performing patch upgrade, we should only target supported services/components
+    // from upgrade pack
+    Set<Service> services = new HashSet<>(cluster.getServices().values());
+    Map<Service, Set<ServiceComponent>> targetComponents = new HashMap<>();
+    for (Service service: services) {
+      Set<ServiceComponent> serviceComponents =
+        new HashSet<>(service.getServiceComponents().values());
+      targetComponents.put(service, serviceComponents);
+    }
+    // TODO: is there any extreme case when we need to set component upgrade state back to NONE
+    // from IN_PROGRESS (e.g. canceled downgrade)
+    s_upgradeHelper.putComponentsToUpgradingState(version, targetComponents);
+
     for (UpgradeGroupHolder group : groups) {
       boolean skippable = group.skippable;
       boolean supportsAutoSkipOnFailure = group.supportsAutoSkipOnFailure;
@@ -894,6 +911,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
     req.persist();
 
     s_upgradeDAO.create(entity);
+    cluster.setUpgradeEntity(entity);
 
     return entity;
   }
@@ -1600,11 +1618,19 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
               HostRoleStatus.ABORTED, internalStatus));
     }
 
+    Long clusterId = internalRequest.getClusterId();
     if (HostRoleStatus.ABORTED == status) {
       if (!internalStatus.isCompletedState()) {
         actionManager.cancelRequest(internalRequest.getRequestId(), reason);
+        // Remove relevant upgrade entity
+        try {
+          Cluster cluster = clusters.get().getClusterById(clusterId);
+          cluster.setUpgradeEntity(null);
+        } catch (AmbariException e) {
+          LOG.warn("Could not clear upgrade entity for cluster with id {}", clusterId, e);
+        }
       }
-    } else {
+    } else { // Processing PENDING
       List<Long> taskIds = new ArrayList<Long>();
 
       for (HostRoleCommand hrc : internalRequest.getCommands()) {
@@ -1615,6 +1641,15 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
       }
 
       actionManager.resubmitTasks(taskIds);
+
+      try {
+        Cluster cluster = clusters.get().getClusterById(clusterId);
+        UpgradeEntity lastUpgradeItemForCluster = s_upgradeDAO.findLastUpgradeForCluster(cluster.getClusterId());
+        cluster.setUpgradeEntity(lastUpgradeItemForCluster);
+      } catch (AmbariException e) {
+        LOG.warn("Could not clear upgrade entity for cluster with id {}", clusterId, e);
+      }
+
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/events/HostComponentVersionAdvertisedEvent.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/HostComponentVersionAdvertisedEvent.java b/ambari-server/src/main/java/org/apache/ambari/server/events/HostComponentVersionAdvertisedEvent.java
new file mode 100644
index 0000000..11ca72b
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/HostComponentVersionAdvertisedEvent.java
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.events;
+
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.ServiceComponentHost;
+
+/**
+ * The {@link HostComponentVersionAdvertisedEvent}
+ * occurs when a Host Component advertises its current version value.
+ */
+public class HostComponentVersionAdvertisedEvent extends ClusterEvent {
+
+  protected Cluster cluster;
+  protected ServiceComponentHost sch;
+  protected String version;
+
+  /**
+   * Constructor.
+   *
+   * @param cluster: cluster.
+   * @param sch: the service component host
+   */
+  public HostComponentVersionAdvertisedEvent(Cluster cluster, ServiceComponentHost sch,
+                                             String version) {
+    super(AmbariEventType.HOST_COMPONENT_VERSION_ADVERTISED, cluster.getClusterId());
+    this.cluster = cluster;
+    this.sch = sch;
+    this.version = version;
+  }
+
+  public ServiceComponentHost getServiceComponentHost() {
+    return sch;
+  }
+
+  public Cluster getCluster() {
+    return cluster;
+  }
+
+  public String getVersion() {
+    return version;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public String toString() {
+    StringBuilder buffer = new StringBuilder("HostComponentVersionAdvertisedEvent{");
+    buffer.append("cluserId=").append(m_clusterId);
+    buffer.append(", serviceName=").append(sch.getServiceName());
+    buffer.append(", componentName=").append(sch.getServiceComponentName());
+    buffer.append(", hostName=").append(sch.getHostName());
+    buffer.append(", version=").append(version);
+    buffer.append("}");
+    return buffer.toString();
+  }
+}
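
As a usage note (not taken verbatim from this commit, though it mirrors the HeartBeatHandler
hunk above), publishing the event looks like the following; cluster, scHost and the injected
versionEventPublisher are assumed to be in scope, and the version string is illustrative.

    HostComponentVersionAdvertisedEvent event =
        new HostComponentVersionAdvertisedEvent(cluster, scHost, "2.3.4.0-3485");
    versionEventPublisher.publish(event);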

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/events/HostComponentVersionEvent.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/HostComponentVersionEvent.java b/ambari-server/src/main/java/org/apache/ambari/server/events/HostComponentVersionEvent.java
deleted file mode 100644
index ee65d3d..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/events/HostComponentVersionEvent.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.server.events;
-
-import org.apache.ambari.server.state.Cluster;
-import org.apache.ambari.server.state.ServiceComponentHost;
-
-/**
- * The {@link org.apache.ambari.server.events.HostComponentVersionEvent} represents all events in Ambari that relate
- * to a Host Component advertising a version.
- */
-public class HostComponentVersionEvent extends ClusterEvent {
-
-  protected Cluster cluster;
-  protected ServiceComponentHost sch;
-
-  /**
-   * Constructor.
-   *
-   * @param cluster: cluster.
-   * @param sch: the service component host
-   */
-  public HostComponentVersionEvent(Cluster cluster, ServiceComponentHost sch) {
-    super(AmbariEventType.HOST_COMPONENT_VERSION_ADVERTISED, cluster.getClusterId());
-    this.cluster = cluster;
-    this.sch = sch;
-  }
-
-  public ServiceComponentHost getServiceComponentHost() {
-    return sch;
-  }
-
-  public Cluster getCluster() {
-    return cluster;
-  }
-
-  /**
-   * {@inheritDoc}
-   */
-  @Override
-  public String toString() {
-    StringBuilder buffer = new StringBuilder("HostComponentVersionEvent{");
-    buffer.append("cluserId=").append(m_clusterId);
-    buffer.append(", serviceName=").append(sch.getServiceName());
-    buffer.append(", componentName=").append(sch.getServiceComponentName());
-    buffer.append(", hostName=").append(sch.getHostName());
-    buffer.append("}");
-    return buffer.toString();
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListener.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListener.java b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListener.java
index 74d4f4b..e2a7795 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListener.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/upgrade/StackVersionListener.java
@@ -20,13 +20,17 @@ package org.apache.ambari.server.events.listeners.upgrade;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
+import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.EagerSingleton;
-import org.apache.ambari.server.events.HostComponentVersionEvent;
+import org.apache.ambari.server.events.HostComponentVersionAdvertisedEvent;
 import org.apache.ambari.server.events.publishers.VersionEventPublisher;
 import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
 import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceComponentHost;
-import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.State;
+import org.apache.ambari.server.state.UpgradeState;
+import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -69,19 +73,35 @@ public class StackVersionListener {
 
   @Subscribe
   @AllowConcurrentEvents
-  public void onAmbariEvent(HostComponentVersionEvent event) {
+  public void onAmbariEvent(HostComponentVersionAdvertisedEvent event) {
     LOG.debug("Received event {}", event);
 
     Cluster cluster = event.getCluster();
 
     ServiceComponentHost sch = event.getServiceComponentHost();
+    String newVersion = event.getVersion();
 
     m_stackVersionLock.lock();
 
+    // Update host component version value if needed
     try {
-      RepositoryVersionEntity repoVersion = sch.recalculateHostVersionState();
-      if (null != repoVersion) {
-        cluster.recalculateClusterVersionState(repoVersion);
+      ServiceComponent sc = cluster.getService(sch.getServiceName()).getServiceComponent(sch.getServiceComponentName());
+      if (newVersion == null) {
+        processComponentVersionNotAdvertised(sch);
+      } else if (sc.getDesiredVersion().equals(State.UNKNOWN.toString())) {
+        processUnknownDesiredVersion(cluster, sc, sch, newVersion);
+      } else if (StringUtils.isNotBlank(newVersion)) {
+        String previousVersion = sch.getVersion();
+        String unknownVersion = State.UNKNOWN.toString();
+        if (previousVersion == null || previousVersion.equalsIgnoreCase(unknownVersion)) {
+          // value may be "UNKNOWN" when upgrading from older Ambari versions
+          // or if host component reports its version for the first time
+          sch.setUpgradeState(UpgradeState.NONE);
+          sch.setVersion(newVersion);
+          bootstrapVersion(cluster, sch);
+        } else if (!StringUtils.equals(previousVersion, newVersion)) {
+          processComponentVersionChange(cluster, sc, sch, newVersion);
+        }
       }
     } catch (Exception e) {
       LOG.error(
@@ -91,4 +111,71 @@ public class StackVersionListener {
       m_stackVersionLock.unlock();
     }
   }
+
+  /**
+   * Bootstrap cluster/repo version when version is reported for the first time
+   * @param cluster target cluster
+   * @param sch target host component
+   * @throws AmbariException
+   */
+  private void bootstrapVersion(Cluster cluster, ServiceComponentHost sch) throws AmbariException {
+    RepositoryVersionEntity repoVersion = sch.recalculateHostVersionState();
+    if (null != repoVersion) {
+      cluster.recalculateClusterVersionState(repoVersion);
+    }
+  }
+
+  /**
+   * Possible situation after upgrade from older Ambari version. Just use
+   * reported component version as desired version
+   * @param cluster target cluster
+   * @param sc target service component
+   * @param sch target host component
+   * @param newVersion advertised version
+   */
+  private void processUnknownDesiredVersion(Cluster cluster, ServiceComponent sc,
+                                            ServiceComponentHost sch,
+                                            String newVersion) throws AmbariException {
+    sc.setDesiredVersion(newVersion);
+    sch.setUpgradeState(UpgradeState.NONE);
+    sch.setVersion(newVersion);
+    bootstrapVersion(cluster, sch);
+  }
+
+  /**
+   * Focuses on cases when host component version really changed
+   * @param cluster target cluster
+   * @param sc target service component
+   * @param sch target host component
+   * @param newVersion advertised version
+   */
+  private void processComponentVersionChange(Cluster cluster, ServiceComponent sc,
+                                             ServiceComponentHost sch,
+                                             String newVersion) {
+    if (sch.getUpgradeState().equals(UpgradeState.IN_PROGRESS)) {
+      // Component status update is received during upgrade process
+      if (sc.getDesiredVersion().equals(newVersion)) {
+        sch.setUpgradeState(UpgradeState.COMPLETE);  // Component upgrade confirmed
+        sch.setStackVersion(cluster.getDesiredStackVersion());
+      } else { // Unexpected (wrong) version received
+        // Even during a failed upgrade, we should not receive a wrong version,
+        // so mark it as VERSION_MISMATCH
+        sch.setUpgradeState(UpgradeState.VERSION_MISMATCH);
+      }
+    } else { // No upgrade in progress, unexpected version change
+      sch.setUpgradeState(UpgradeState.VERSION_MISMATCH);
+    }
+    sch.setVersion(newVersion);
+  }
+
+  /**
+   * Focuses on cases when a component does not advertise its version
+   */
+  private void processComponentVersionNotAdvertised(ServiceComponentHost sch) {
+    if (UpgradeState.ONGOING_UPGRADE_STATES.contains(sch.getUpgradeState())) {
+      sch.setUpgradeState(UpgradeState.FAILED);
+    } else {
+      sch.setUpgradeState(UpgradeState.VERSION_MISMATCH);
+    }
+  }
 }
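
For readers skimming the diff, the branching added above can be condensed into one pure
function. This is a hypothetical restatement only: the helper name does not exist in Ambari,
it assumes org.apache.ambari.server.state.UpgradeState is on the classpath, and the literal
"UNKNOWN" stands in for State.UNKNOWN.toString().

    // Sketch: the upgrade-state decision made by StackVersionListener for one host component.
    static UpgradeState resolveUpgradeState(String desired, String previous, String advertised,
                                            UpgradeState current) {
      if (advertised == null) {                         // component did not advertise a version
        return UpgradeState.ONGOING_UPGRADE_STATES.contains(current)
            ? UpgradeState.FAILED : UpgradeState.VERSION_MISMATCH;
      }
      if ("UNKNOWN".equalsIgnoreCase(desired)) {
        return UpgradeState.NONE;                       // desired version is bootstrapped from the report
      }
      if (previous == null || "UNKNOWN".equalsIgnoreCase(previous)) {
        return UpgradeState.NONE;                       // first report, or upgrade from an older Ambari
      }
      if (!previous.equals(advertised)) {               // the version really changed
        if (current == UpgradeState.IN_PROGRESS) {
          return desired.equals(advertised) ? UpgradeState.COMPLETE : UpgradeState.VERSION_MISMATCH;
        }
        return UpgradeState.VERSION_MISMATCH;           // unexpected change outside an upgrade
      }
      return current;                                   // unchanged version: keep the current state
    }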

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/events/publishers/VersionEventPublisher.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/publishers/VersionEventPublisher.java b/ambari-server/src/main/java/org/apache/ambari/server/events/publishers/VersionEventPublisher.java
index 3a11f38..5b32c4e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/events/publishers/VersionEventPublisher.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/publishers/VersionEventPublisher.java
@@ -20,11 +20,11 @@ package org.apache.ambari.server.events.publishers;
 
 import com.google.common.eventbus.EventBus;
 import com.google.inject.Singleton;
-import org.apache.ambari.server.events.HostComponentVersionEvent;
+import org.apache.ambari.server.events.HostComponentVersionAdvertisedEvent;
 
 /**
  * The {@link VersionEventPublisher} is used to publish instances of
- * {@link HostComponentVersionEvent} to any {@link com.google.common.eventbus.Subscribe} interested.
+ * {@link HostComponentVersionAdvertisedEvent} to any {@link com.google.common.eventbus.Subscribe} interested.
  * It uses a single-threaded, serial {@link EventBus}.
  */
 @Singleton
@@ -44,11 +44,11 @@ public class VersionEventPublisher {
   /**
    * Publishes the specified event to all registered listeners that
    * {@link com.google.common.eventbus.Subscribe} to any of the
-   * {@link org.apache.ambari.server.events.HostComponentVersionEvent} instances.
+   * {@link HostComponentVersionAdvertisedEvent} instances.
    *
    * @param event the event
    */
-  public void publish(HostComponentVersionEvent event) {
+  public void publish(HostComponentVersionAdvertisedEvent event) {
     m_eventBus.post(event);
   }
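
For readers unfamiliar with the pattern, the class above is a thin wrapper around a Guava
EventBus. The standalone demo below (plain Guava, stand-in types, illustrative names; not
Ambari code) shows the same serial publish/subscribe flow end to end.

    import com.google.common.eventbus.EventBus;
    import com.google.common.eventbus.Subscribe;

    public class VersionEventDemo {
      // Stand-in for HostComponentVersionAdvertisedEvent.
      static final class VersionAdvertised {
        final String component;
        final String version;
        VersionAdvertised(String component, String version) {
          this.component = component;
          this.version = version;
        }
      }

      static final class Listener {
        @Subscribe
        public void onVersion(VersionAdvertised event) {
          System.out.println(event.component + " advertised " + event.version);
        }
      }

      public static void main(String[] args) {
        EventBus bus = new EventBus("version");   // serial, same-thread delivery
        bus.register(new Listener());
        bus.post(new VersionAdvertised("DATANODE", "2.3.4.0-3485"));
      }
    }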
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UpgradeDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UpgradeDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UpgradeDAO.java
index 06f6ac1..4a923be 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UpgradeDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UpgradeDAO.java
@@ -104,7 +104,13 @@ public class UpgradeDAO {
    */
   @Transactional
   public void create(UpgradeEntity entity) {
-    entityManagerProvider.get().persist(entity);
+    EntityManager entityManager = entityManagerProvider.get();
+    // This is required because none of the entities
+    // for the request are actually persisted yet,
+    // JPA ordering could allow foreign key entities
+    // to be created after this statement.
+    entityManager.flush();
+    entityManager.persist(entity);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterEntity.java
index 2c4d695..2e0a15d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterEntity.java
@@ -145,6 +145,13 @@ public class ClusterEntity {
   })
   private ResourceEntity resource;
 
+  /**
+   * {@code null} when there is no upgrade/downgrade in progress.
+   */
+  @OneToOne(cascade = CascadeType.ALL)
+  @JoinColumn(name = "upgrade_id", referencedColumnName = "upgrade_id")
+  private UpgradeEntity upgradeEntity = null;
+
   public Long getClusterId() {
     return clusterId;
   }
@@ -351,4 +358,12 @@ public class ClusterEntity {
   public void setResource(ResourceEntity resource) {
     this.resource = resource;
   }
+
+  public UpgradeEntity getUpgradeEntity() {
+    return upgradeEntity;
+  }
+
+  public void setUpgradeEntity(UpgradeEntity upgradeEntity) {
+    this.upgradeEntity = upgradeEntity;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostComponentStateEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostComponentStateEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostComponentStateEntity.java
index f92f645..1555321 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostComponentStateEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostComponentStateEntity.java
@@ -85,8 +85,11 @@ public class HostComponentStateEntity {
   @Column(name = "component_name", nullable = false, insertable = false, updatable = false)
   private String componentName;
 
+  /**
+   * Version reported by host component during last status update.
+   */
   @Column(name = "version", nullable = false, insertable = true, updatable = true)
-  private String version = "UNKNOWN";
+  private String version = State.UNKNOWN.toString();
 
   @Enumerated(value = EnumType.STRING)
   @Column(name = "current_state", nullable = false, insertable = true, updatable = true)

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
index 519e4e6..7281c01 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceComponentDesiredStateEntity.java
@@ -91,6 +91,13 @@ public class ServiceComponentDesiredStateEntity {
   @JoinColumn(name = "desired_stack_id", unique = false, nullable = false, insertable = true, updatable = true)
   private StackEntity desiredStack;
 
+  /**
+   * Version string that should be followed by instances
+   * of the component on hosts. Includes both the stack version and the build number.
+   */
+  @Column(name = "desired_version", nullable = false, insertable = true, updatable = true)
+  private String desiredVersion = State.UNKNOWN.toString();
+
   @ManyToOne
   @JoinColumns({@javax.persistence.JoinColumn(name = "cluster_id", referencedColumnName = "cluster_id", nullable = false), @JoinColumn(name = "service_name", referencedColumnName = "service_name", nullable = false)})
   private ClusterServiceEntity clusterServiceEntity;
@@ -154,6 +161,14 @@ public class ServiceComponentDesiredStateEntity {
     this.desiredStack = desiredStack;
   }
 
+  public String getDesiredVersion() {
+    return desiredVersion;
+  }
+
+  public void setDesiredVersion(String desiredVersion) {
+    this.desiredVersion = desiredVersion;
+  }
+
   /**
    * Adds a historical entry for the version of this service component. New
    * entries are automatically created when this entities is merged via a

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
index 0c8df78..9088c1c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
@@ -181,7 +181,7 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
       List<HostVersionEntity> hostVersions = hostVersionDAO.findByClusterStackAndVersion(
           clusterName, clusterDesiredStackId, version);
 
-      // Will include hosts whose state is UPGRADED, and potentially INSTALLED
+      // Will include hosts whose state is INSTALLED
       Set<HostVersionEntity> hostVersionsAllowed = new HashSet<HostVersionEntity>();
       Set<String> hostsWithoutCorrectVersionState = new HashSet<String>();
       Set<String> hostsToUpdate = new HashSet<String>();
@@ -193,12 +193,12 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
         boolean hostHasCorrectVersionState = false;
         RepositoryVersionState hostVersionState = hostVersion.getState();
         switch( hostVersionState ){
-          case UPGRADED:
           case CURRENT:{
             // if the state is correct, then do nothing
             hostHasCorrectVersionState = true;
             break;
           }
+          case NOT_REQUIRED:
           case INSTALLED:{
             // It is possible that the host version has a state of INSTALLED and it
             // never changed if the host only has components that do not advertise a
@@ -209,12 +209,10 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
                 host, clusterDesiredStackId);
 
             // if all components have finished advertising their version, then
-            // this host can be considered UPGRADED
+            // this host can be considered upgraded
             if (hostSummary.haveAllComponentsFinishedAdvertisingVersion()) {
-              // mark this as UPGRADED
+              // mark this as upgraded
               hostHasCorrectVersionState = true;
-              hostVersion.setState(RepositoryVersionState.UPGRADED);
-              hostVersion = hostVersionDAO.merge(hostVersion);
             } else {
               hostsWithoutCorrectVersionState.add(hostVersion.getHostName());
             }
@@ -235,7 +233,7 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
         }
       }
 
-      // throw an exception if there are hosts which are not not fully UPGRADED
+      // throw an exception if there are hosts which are not fully upgraded
       if (hostsWithoutCorrectVersionState.size() > 0) {
         String message = String.format("The following %d host(s) have not been upgraded to version %s. " +
                 "Please install and upgrade the Stack Version on those hosts and try again.\nHosts: %s\n",
@@ -265,22 +263,21 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
       }
 
 
-      // we're guaranteed to be ready transition to UPGRADED now; ensure that
-      // the transition will be allowed if the cluster state is not UPGRADED
+      // we're guaranteed to be ready transition to upgraded now; ensure that
+      // the transition will be allowed if the cluster state is not upgraded
       upgradingClusterVersion = clusterVersionDAO.findByClusterAndStackAndVersion(clusterName,
           clusterDesiredStackId, version);
 
-      if (RepositoryVersionState.UPGRADING == upgradingClusterVersion.getState()) {
-        cluster.transitionClusterVersion(clusterDesiredStackId, version,
-            RepositoryVersionState.UPGRADED);
+      if (RepositoryVersionState.INSTALLING == upgradingClusterVersion.getState()) {
+        cluster.transitionClusterVersion(clusterDesiredStackId, version, RepositoryVersionState.INSTALLED);
 
         upgradingClusterVersion = clusterVersionDAO.findByClusterAndStackAndVersion(
             clusterName, clusterDesiredStackId, version);
       }
 
       // we cannot finalize since the cluster was not ready to move into the
-      // UPGRADED state
-      if (RepositoryVersionState.UPGRADED != upgradingClusterVersion.getState()) {
+      // upgraded state
+      if (RepositoryVersionState.INSTALLED != upgradingClusterVersion.getState()) {
         throw new AmbariException(String.format("The cluster stack version state %s is not allowed to transition directly into %s",
             upgradingClusterVersion.getState(), RepositoryVersionState.CURRENT.toString()));
       }
@@ -303,6 +300,9 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
           String.format("Finalizing the version for %d host(s).\n", hostVersionsAllowed.size()));
       cluster.mapHostVersions(hostsToUpdate, upgradingClusterVersion, RepositoryVersionState.CURRENT);
 
+      // Reset upgrade state
+      cluster.setUpgradeEntity(null);
+
       // transitioning the cluster into CURRENT will update the current/desired
       // stack values
       outSB.append(String.format("Finalizing the version for cluster %s.\n", clusterName));
@@ -405,9 +405,9 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
       // update the cluster version
       for (ClusterVersionEntity cve : clusterVersionDAO.findByCluster(clusterName)) {
         switch (cve.getState()) {
-          case UPGRADE_FAILED:
-          case UPGRADED:
-          case UPGRADING: {
+          case INSTALL_FAILED:
+          case INSTALLED:
+          case INSTALLING: {
               badVersions.add(cve.getRepositoryVersion().getVersion());
               cve.setState(RepositoryVersionState.INSTALLED);
               clusterVersionDAO.merge(cve);
@@ -447,6 +447,8 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
       // ensure that when downgrading, we set the desired back to the
       // original value
       cluster.setDesiredStackVersion(currentClusterStackId);
+      // Reset upgrade state
+      cluster.setUpgradeEntity(null);
 
       return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
           out.toString(), err.toString());
@@ -467,7 +469,7 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
    * have been upgraded to the target version.
    * @param cluster         the cluster the upgrade is for
    * @param desiredVersion  the target version of the upgrade
-   * @param targetStack     the target stack id for meta-info lookup
+   * @param targetStackId     the target stack id for meta-info lookup
    * @return the list of {@link InfoTuple} objects of host components in error
    */
   protected List<InfoTuple> checkHostComponentVersions(Cluster cluster, String desiredVersion, StackId targetStackId)

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/stack/MasterHostResolver.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/MasterHostResolver.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/MasterHostResolver.java
index 360f2b8..b813625 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/MasterHostResolver.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/MasterHostResolver.java
@@ -208,7 +208,8 @@ public class MasterHostResolver {
         if (maintenanceState != MaintenanceState.OFF) {
           unhealthyHosts.add(sch);
         } else if (null == m_version || null == sch.getVersion() ||
-            !sch.getVersion().equals(m_version) || sch.getUpgradeState() == UpgradeState.FAILED) {
+            !sch.getVersion().equals(m_version) ||
+            sch.getUpgradeState() == UpgradeState.FAILED) {
           upgradeHosts.add(hostName);
         }
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
index b430525..f26f471 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/Cluster.java
@@ -32,6 +32,7 @@ import org.apache.ambari.server.orm.entities.HostEntity;
 import org.apache.ambari.server.orm.entities.HostVersionEntity;
 import org.apache.ambari.server.orm.entities.PrivilegeEntity;
 import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.orm.entities.UpgradeEntity;
 import org.apache.ambari.server.security.authorization.AuthorizationException;
 import org.apache.ambari.server.state.configgroup.ConfigGroup;
 import org.apache.ambari.server.state.scheduler.RequestExecution;
@@ -630,4 +631,18 @@ public interface Cluster {
    * @return true if the cluster was deployed with a Blueprint otherwise false.
    */
   boolean isBluePrintDeployed();
+
+  /**
+   * @return upgrade that is in progress for a cluster. If no upgrade is going
+   * on, {@code null} is returned.
+   */
+  UpgradeEntity getUpgradeEntity();
+
+  /**
+   * The value is explicitly set on the ClusterEntity when Creating,
+   * Aborting (switching to downgrade), Resuming, or Finalizing an upgrade.
+   * @param upgradeEntity the upgrade entity to set for cluster
+   * @throws AmbariException
+   */
+  void setUpgradeEntity(UpgradeEntity upgradeEntity) throws AmbariException;
 }
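
To make the contract above concrete, here is a sketch of the lifecycle pieced together from
the UpgradeResourceProvider and FinalizeUpgradeAction hunks in this commit; the surrounding
variables are placeholders and exception handling is omitted.

    // Sketch only, not a literal excerpt.
    s_upgradeDAO.create(upgradeEntity);
    cluster.setUpgradeEntity(upgradeEntity);    // create/resume: an upgrade is now in progress
    // ... upgrade executes ...
    cluster.setUpgradeEntity(null);             // finalize or abort: nothing in progress
    assert cluster.getUpgradeEntity() == null;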

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryVersionState.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryVersionState.java b/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryVersionState.java
index 344f358..119205a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryVersionState.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryVersionState.java
@@ -21,8 +21,7 @@ package org.apache.ambari.server.state;
 /**
  * There must be exactly one repository version that is in a CURRENT state for a particular cluster or host.
  * There may be 0 or more repository versions in an INSTALLED or INSTALLING state.
- * A repository version state transitions from UPGRADING -> UPGRADED | UPGRADE_FAILED
- * The operation to transition a repository version state from UPGRADED into CURRENT must be atomic and change the existing
+ * The operation to transition a repository version state from INSTALLED into CURRENT must be atomic and change the existing
  * relation between repository version and cluster or host from CURRENT to INSTALLED.
  *
  * <pre>
@@ -42,40 +41,25 @@ package org.apache.ambari.server.state;
  * Version 1: CURRENT
  * Version 2: INSTALL_FAILED (a retry can set this back to INSTALLING)
  *
- * Step 4: Start an upgrade from Version 1 to Version 2
- * Version 1: CURRENT
- * Version 2: UPGRADING
- *
- * Step 5: Upgrade can either complete successfully or fail
- * Version 1: CURRENT
- * Version 2: UPGRADE_FAILED (a retry can set this back to UPGRADING)
- *
- * or
- *
+ * Step 4: Perform an upgrade from Version 1 to Version 2
  * Version 1: INSTALLED
  * Version 2: CURRENT
  *
- * Step 4: May revert to the original version via a downgrade, which is technically still an upgrade to a version.
- * Version 1: UPGRADING
- * Version 2: CURRENT
- *
+ * Step 5: May revert to the original version via a downgrade, which is technically still an upgrade to a version
  * and eventually becomes
  *
  * Version 1: CURRENT
  * Version 2: INSTALLED
  *
  * *********************************************
- * Start states: CURRENT, UPGRADING, INSTALLING
+ * Start states: CURRENT, INSTALLING
  * Allowed Transitions:
- * UPGRADING -> UPGRADED | UPGRADE_FAILED
- * UPGRADE_FAILED -> UPGRADING
- * UPGRADED -> CURRENT
+ * INSTALLED -> CURRENT
  * INSTALLING -> INSTALLED | INSTALL_FAILED | OUT_OF_SYNC
  * INSTALLED -> INSTALLED | INSTALLING | OUT_OF_SYNC
  * OUT_OF_SYNC -> INSTALLING
  * INSTALL_FAILED -> INSTALLING
  * CURRENT -> INSTALLED
- * INSTALLED -> UPGRADING
  * </pre>
  */
 public enum RepositoryVersionState {
@@ -103,18 +87,5 @@ public enum RepositoryVersionState {
    * Repository version that is installed and supported and is the active version.
    */
   CURRENT,
-  /**
-   * Repository version that is in the process of upgrading to become the CURRENT active version,
-   * and the previous active version transitions to an INSTALLED state.
-   */
-  UPGRADING,
-  /**
-   * Repository version that during the upgrade process failed to become the active version and must be remedied.
-   */
-  UPGRADE_FAILED,
-  /**
-   * Repository version that finished upgrading and should be finalized to become CURRENT.
-   */
-  UPGRADED
 
 }

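The trimmed javadoc above now documents a much smaller transition table for repository version states. The following self-contained sketch expresses that table as a lookup helper; it is illustrative only, and nothing in this patch adds such a helper to the real enum.

import java.util.EnumSet;
import java.util.Map;

public class RepoStateTransitions {

  enum State { INSTALLING, INSTALL_FAILED, INSTALLED, OUT_OF_SYNC, NOT_REQUIRED, CURRENT }

  // Mirrors the "Allowed Transitions" list in the javadoc above.
  private static final Map<State, EnumSet<State>> ALLOWED = Map.of(
      State.INSTALLING,     EnumSet.of(State.INSTALLED, State.INSTALL_FAILED, State.OUT_OF_SYNC),
      State.INSTALLED,      EnumSet.of(State.INSTALLED, State.INSTALLING, State.OUT_OF_SYNC, State.CURRENT),
      State.OUT_OF_SYNC,    EnumSet.of(State.INSTALLING),
      State.INSTALL_FAILED, EnumSet.of(State.INSTALLING),
      State.CURRENT,        EnumSet.of(State.INSTALLED));

  static boolean isAllowed(State from, State to) {
    return ALLOWED.getOrDefault(from, EnumSet.noneOf(State.class)).contains(to);
  }

  public static void main(String[] args) {
    System.out.println(isAllowed(State.INSTALLED, State.CURRENT));  // true: finalize after install
    System.out.println(isAllowed(State.INSTALLING, State.CURRENT)); // false: must finish installing first
  }
}
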
http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponent.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponent.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponent.java
index dcb7cf6..983cbdf 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponent.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponent.java
@@ -56,6 +56,10 @@ public interface ServiceComponent {
 
   void setDesiredStackVersion(StackId stackVersion);
 
+  String getDesiredVersion();
+
+  void setDesiredVersion(String version);
+
   Map<String, ServiceComponentHost> getServiceComponentHosts();
 
   ServiceComponentHost getServiceComponentHost(String hostname)

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentHost.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentHost.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentHost.java
index f1e8d62..2a062a7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentHost.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentHost.java
@@ -32,30 +32,30 @@ public interface ServiceComponentHost {
   /**
    * Get the Cluster that this object maps to
    */
-  public long getClusterId();
+  long getClusterId();
 
   /**
    * Get the Cluster that this object maps to
    */
-  public String getClusterName();
+  String getClusterName();
 
   /**
    * Get the Service this object maps to
    * @return Name of the Service
    */
-  public String getServiceName();
+  String getServiceName();
 
   /**
    * Get the ServiceComponent this object maps to
    * @return Name of the ServiceComponent
    */
-  public String getServiceComponentName();
+  String getServiceComponentName();
 
   /**
    * Get the Host this object maps to
    * @return Host's hostname
    */
-  public String getHostName();
+  String getHostName();
 
   /**
    * Get the Host this object maps to
@@ -68,20 +68,20 @@ public interface ServiceComponentHost {
    * @param event Event to handle
    * @throws InvalidStateTransitionException
    */
-  public void handleEvent(ServiceComponentHostEvent event)
+  void handleEvent(ServiceComponentHostEvent event)
       throws InvalidStateTransitionException;
 
-  public State getDesiredState();
+  State getDesiredState();
 
-  public void setDesiredState(State state);
+  void setDesiredState(State state);
 
-  public StackId getDesiredStackVersion();
+  StackId getDesiredStackVersion();
 
-  public void setDesiredStackVersion(StackId stackVersion);
+  void setDesiredStackVersion(StackId stackVersion);
 
-  public State getState();
+  State getState();
 
-  public void setState(State state);
+  void setState(State state);
 
   /**
    * Gets the current security state for this ServiceComponent
@@ -90,7 +90,7 @@ public interface ServiceComponentHost {
    *
    * @return the current SecurityState for this ServiceComponent
    */
-  public SecurityState getSecurityState();
+  SecurityState getSecurityState();
 
   /**
    * Sets the current security state for this ServiceComponent
@@ -99,21 +99,21 @@ public interface ServiceComponentHost {
    *
    * @param state the current SecurityState for this ServiceComponent
    */
-  public void setSecurityState(SecurityState state);
+  void setSecurityState(SecurityState state);
 
   /**
    * Gets the version of the component.
    *
    * @return component version
    */
-  public String getVersion();
+  String getVersion();
 
   /**
    * Sets the version of the component from the stack.
    *
    * @param version component version (e.g. 2.2.0.0-2041)
    */
-  public void setVersion(String version);
+  void setVersion(String version);
 
   /**
    * Gets the desired security state for this ServiceComponent
@@ -123,7 +123,7 @@ public interface ServiceComponentHost {
    *
    * @return the desired SecurityState for this ServiceComponent
    */
-  public SecurityState getDesiredSecurityState();
+  SecurityState getDesiredSecurityState();
 
   /**
    * Sets the desired security state for this ServiceComponent
@@ -134,27 +134,32 @@ public interface ServiceComponentHost {
    * @param securityState the desired SecurityState for this ServiceComponent
    * @throws AmbariException if the new state is not an endpoint state
    */
-  public void setDesiredSecurityState(SecurityState securityState) throws AmbariException;
+  void setDesiredSecurityState(SecurityState securityState) throws AmbariException;
 
   /**
    * @param upgradeState the upgrade state
    */
-  public void setUpgradeState(UpgradeState upgradeState);
+  void setUpgradeState(UpgradeState upgradeState);
 
   /**
-   * @return the upgrade state
+   * @return the upgrade state. Valid values:
+   * NONE - the component is installed and good to go; no upgrade is in progress
+   * IN_PROGRESS - the component is being upgraded
+   * COMPLETE - the component has reported the correct new version during the upgrade
+   * FAILED - the upgrade failed and the component did not get upgraded
+   * VERSION_MISMATCH - the component reported an unexpected version
    */
-  public UpgradeState getUpgradeState();
+  UpgradeState getUpgradeState();
 
-  public StackId getStackVersion();
+  StackId getStackVersion();
 
-  public void setStackVersion(StackId stackVersion);
+  void setStackVersion(StackId stackVersion);
 
-  public HostComponentAdminState getComponentAdminState();
+  HostComponentAdminState getComponentAdminState();
 
-  public void setComponentAdminState(HostComponentAdminState attribute);
+  void setComponentAdminState(HostComponentAdminState attribute);
 
-  public ServiceComponentHostResponse convertToResponse();
+  ServiceComponentHostResponse convertToResponse();
 
   boolean isPersisted();
 
@@ -162,62 +167,62 @@ public interface ServiceComponentHost {
 
   void refresh();
 
-  public void debugDump(StringBuilder sb);
+  void debugDump(StringBuilder sb);
 
-  public boolean canBeRemoved();
+  boolean canBeRemoved();
 
-  public void delete() throws AmbariException;
+  void delete() throws AmbariException;
 
   /**
    * Updates the tags that have been recognized by a START action.
    * @param configTags
    */
-  public void updateActualConfigs(Map<String, Map<String, String>> configTags);
+  void updateActualConfigs(Map<String, Map<String, String>> configTags);
 
   /**
    * Gets the actual config tags, if known.
    * @return the actual config map
    */
-  public Map<String, HostConfig> getActualConfigs();
+  Map<String, HostConfig> getActualConfigs();
 
-  public HostState getHostState();
+  HostState getHostState();
 
   /**
    * @param state the maintenance state
    */
-  public void setMaintenanceState(MaintenanceState state);
+  void setMaintenanceState(MaintenanceState state);
 
   /**
    * @return the maintenance state
    */
-  public MaintenanceState getMaintenanceState();
+  MaintenanceState getMaintenanceState();
 
   /**
    * @param procs a list containing a map describing each process
    */
-  public void setProcesses(List<Map<String, String>> procs);
+  void setProcesses(List<Map<String, String>> procs);
 
 
   /**
    * @return the list of maps describing each process
    */
-  public List<Map<String, String>> getProcesses();
+  List<Map<String, String>> getProcesses();
 
   /**
    * @return whether restart required
    */
-  public boolean isRestartRequired();
+  boolean isRestartRequired();
 
   /**
    * @param restartRequired the restartRequired flag
    */
-  public void setRestartRequired(boolean restartRequired);
+  void setRestartRequired(boolean restartRequired);
 
   /**
    * Changes host version state according to state of the components installed on the host.
    * @return The Repository Version Entity with that component in the host
    * @throws AmbariException if host is detached from the cluster
    */
-  public RepositoryVersionEntity recalculateHostVersionState() throws AmbariException;
+  RepositoryVersionEntity recalculateHostVersionState() throws AmbariException;
 
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
index eca911d..197f4cd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
@@ -93,6 +93,7 @@ public class ServiceComponentImpl implements ServiceComponent {
     desiredStateEntity = new ServiceComponentDesiredStateEntity();
     desiredStateEntity.setComponentName(componentName);
     desiredStateEntity.setDesiredState(State.INIT);
+    desiredStateEntity.setDesiredVersion(State.UNKNOWN.toString());
     desiredStateEntity.setServiceName(service.getName());
     desiredStateEntity.setClusterId(service.getClusterId());
     desiredStateEntity.setRecoveryEnabled(false);
@@ -467,6 +468,33 @@ public class ServiceComponentImpl implements ServiceComponent {
         LOG.warn("Setting a member on an entity object that may have been " +
           "previously deleted, serviceName = " + (service != null ? service.getName() : ""));
       }
+    } finally {
+      readWriteLock.writeLock().unlock();
+    }
+  }
+
+  @Override
+  public String getDesiredVersion() {
+    readWriteLock.readLock().lock();
+    try {
+      return getDesiredStateEntity().getDesiredVersion();
+    } finally {
+      readWriteLock.readLock().unlock();
+    }
+  }
+
+  @Override
+  public void setDesiredVersion(String version) {
+    readWriteLock.writeLock().lock();
+    try {
+      ServiceComponentDesiredStateEntity desiredStateEntity = getDesiredStateEntity();
+      if (desiredStateEntity != null) {
+        desiredStateEntity.setDesiredVersion(version);
+        saveIfPersisted(desiredStateEntity);
+      } else {
+        LOG.warn("Setting a member on an entity object that may have been " +
+          "previously deleted, serviceName = " + (service != null ? service.getName() : ""));
+      }
 
     } finally {
       readWriteLock.writeLock().unlock();

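The new desired-version accessors follow the same read/write-lock discipline as the rest of ServiceComponentImpl: reads take the read lock, writes take the write lock and persist the entity when it already exists. A minimal stand-alone sketch of that pattern is shown below; DesiredVersionHolder is an illustrative stand-in, not the Ambari class, and the persistence step is reduced to a comment.

import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

public class DesiredVersionHolder {

  private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
  private String desiredVersion = "UNKNOWN"; // mirrors the State.UNKNOWN default set at creation time

  public String getDesiredVersion() {
    readWriteLock.readLock().lock();
    try {
      return desiredVersion;
    } finally {
      readWriteLock.readLock().unlock();
    }
  }

  public void setDesiredVersion(String version) {
    readWriteLock.writeLock().lock();
    try {
      desiredVersion = version; // the real code also saves the desired-state entity if it is persisted
    } finally {
      readWriteLock.writeLock().unlock();
    }
  }

  public static void main(String[] args) {
    DesiredVersionHolder holder = new DesiredVersionHolder();
    holder.setDesiredVersion("2.2.1.0-1234");
    System.out.println(holder.getDesiredVersion());
  }
}
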
http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java
index 5143bfa..05d49c4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeHelper.java
@@ -722,4 +722,26 @@ public class UpgradeHelper {
       LOG.debug("Could not get service detail", e);
     }
   }
+
+  /**
+   * Transitions all affected components to the upgrading state. The transition is performed
+   * only for components that advertise their version. The service component's desired
+   * version is set to the one passed as an argument.
+   * @param version desired version (like 2.2.1.0-1234) for upgrade
+   * @param targetServices targets for upgrade
+   */
+  public void putComponentsToUpgradingState(String version,
+                                            Map<Service, Set<ServiceComponent>> targetServices) throws AmbariException {
+    // TODO: generalize method?
+    for (Map.Entry<Service, Set<ServiceComponent>> entry: targetServices.entrySet()) {
+      for (ServiceComponent serviceComponent: entry.getValue()) {
+        if (serviceComponent.isVersionAdvertised()) {
+          for (ServiceComponentHost serviceComponentHost: serviceComponent.getServiceComponentHosts().values()) {
+            serviceComponentHost.setUpgradeState(UpgradeState.IN_PROGRESS);
+          }
+          serviceComponent.setDesiredVersion(version);
+        }
+      }
+    }
+  }
 }

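putComponentsToUpgradingState walks every targeted service component, skips components that never advertise a version, marks each of their host components IN_PROGRESS, and records the desired version on the component itself. The sketch below reproduces that pass with local stand-in types; Component, ComponentHost and the simplified map signature are assumptions for illustration, not Ambari's real model.

import java.util.List;
import java.util.Map;

public class MarkUpgradingSketch {

  enum UpgradeState { NONE, IN_PROGRESS, COMPLETE, FAILED, VERSION_MISMATCH }

  static class ComponentHost { UpgradeState upgradeState = UpgradeState.NONE; }

  static class Component {
    final boolean advertisesVersion;
    final List<ComponentHost> hosts;
    String desiredVersion = "UNKNOWN";
    Component(boolean advertisesVersion, List<ComponentHost> hosts) {
      this.advertisesVersion = advertisesVersion;
      this.hosts = hosts;
    }
  }

  /** Flag every version-advertising component (and its host components) as upgrading to the target version. */
  static void putComponentsToUpgradingState(String version, Map<String, List<Component>> targetServices) {
    for (List<Component> components : targetServices.values()) {
      for (Component component : components) {
        if (!component.advertisesVersion) {
          continue; // some components never report a version and are left alone
        }
        for (ComponentHost host : component.hosts) {
          host.upgradeState = UpgradeState.IN_PROGRESS;
        }
        component.desiredVersion = version;
      }
    }
  }

  public static void main(String[] args) {
    Component namenode = new Component(true, List.of(new ComponentHost()));
    putComponentsToUpgradingState("2.2.1.0-1234", Map.of("HDFS", List.of(namenode)));
    System.out.println(namenode.desiredVersion + " / " + namenode.hosts.get(0).upgradeState);
  }
}
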
http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeState.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeState.java b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeState.java
index ced1dd3..889e92d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeState.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeState.java
@@ -17,6 +17,8 @@
  */
 package org.apache.ambari.server.state;
 
+import java.util.EnumSet;
+
 /**
  * Indicates the upgrade state
  */
@@ -30,16 +32,26 @@ public enum UpgradeState {
    */
   COMPLETE,
   /**
-   * Upgrade is pending
-   */
-  PENDING,
-  /**
    * Upgrade is in progress
    */
   IN_PROGRESS,
   /**
    * Upgrade has failed
    */
-  FAILED
+  FAILED,
+  /**
+   * Component reported unexpected/wrong version
+   */
+  VERSION_MISMATCH;
+
+  /**
+   * States in which the new/correct version has not yet been advertised
+   */
+  public static final EnumSet<UpgradeState> VERSION_NON_ADVERTISED_STATES = EnumSet.of(IN_PROGRESS, FAILED, VERSION_MISMATCH);
+
+  /**
+   * States in which the component is believed to be participating in an upgrade
+   */
+  public static final EnumSet<UpgradeState> ONGOING_UPGRADE_STATES = EnumSet.of(IN_PROGRESS, FAILED, COMPLETE);
 
 }

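The two EnumSet groupings added above let callers answer the common questions in a single lookup: "has this component advertised its new version yet?" and "is this component still part of the upgrade?". A small sketch of how they might be consulted follows; the enum copy and the example checks are illustrative, not taken from this patch.

import java.util.EnumSet;

public class UpgradeStateGroups {

  enum UpgradeState { NONE, COMPLETE, IN_PROGRESS, FAILED, VERSION_MISMATCH }

  // States in which the correct new version has not yet been advertised by the component.
  static final EnumSet<UpgradeState> VERSION_NON_ADVERTISED_STATES =
      EnumSet.of(UpgradeState.IN_PROGRESS, UpgradeState.FAILED, UpgradeState.VERSION_MISMATCH);

  // States in which the component is considered to be taking part in an upgrade.
  static final EnumSet<UpgradeState> ONGOING_UPGRADE_STATES =
      EnumSet.of(UpgradeState.IN_PROGRESS, UpgradeState.FAILED, UpgradeState.COMPLETE);

  public static void main(String[] args) {
    UpgradeState state = UpgradeState.COMPLETE;
    System.out.println(VERSION_NON_ADVERTISED_STATES.contains(state)); // false: the version is known
    System.out.println(ONGOING_UPGRADE_STATES.contains(state));        // true: still part of the upgrade
  }
}
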
http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 07addfc..727eaf3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -22,6 +22,7 @@ import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -91,6 +92,7 @@ import org.apache.ambari.server.orm.entities.ResourceEntity;
 import org.apache.ambari.server.orm.entities.ServiceConfigEntity;
 import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.orm.entities.TopologyRequestEntity;
+import org.apache.ambari.server.orm.entities.UpgradeEntity;
 import org.apache.ambari.server.security.authorization.AuthorizationException;
 import org.apache.ambari.server.security.authorization.AuthorizationHelper;
 import org.apache.ambari.server.state.Cluster;
@@ -150,6 +152,8 @@ public class ClusterImpl implements Cluster {
    * Prefix for cluster session attributes name.
    */
   private static final String CLUSTER_SESSION_ATTRIBUTES_PREFIX = "cluster_session_attributes:";
+  private static final Set<RepositoryVersionState> ALLOWED_REPOSITORY_STATES =
+      EnumSet.of(RepositoryVersionState.INSTALLING);
 
   @Inject
   private Clusters clusters;
@@ -1154,10 +1158,10 @@ public class ClusterImpl implements Cluster {
   }
 
   /**
-   * During the Finalize Action, want to transition all Host Versions from UPGRADED to CURRENT, and the last CURRENT one to INSTALLED.
+   * During the Finalize Action, we want to transition all Host Versions from INSTALLED to CURRENT, and the last CURRENT one to INSTALLED.
    * @param hostNames Collection of host names
    * @param currentClusterVersion Entity that contains the cluster's current stack (with its name and version)
-   * @param desiredState Desired state must be {@link RepositoryVersionState#CURRENT} or {@link RepositoryVersionState#UPGRADING}
+   * @param desiredState Desired state must be {@link RepositoryVersionState#CURRENT}
    * @throws AmbariException
    */
   @Override
@@ -1221,7 +1225,7 @@ public class ClusterImpl implements Cluster {
               && desiredState == RepositoryVersionState.CURRENT
               && currentHostVersionEntity.getState() == RepositoryVersionState.CURRENT) {
             currentHostVersionEntity.setState(RepositoryVersionState.INSTALLED);
-            currentHostVersionEntity = hostVersionDAO.merge(currentHostVersionEntity);
+            hostVersionDAO.merge(currentHostVersionEntity);
           }
         }
       }
@@ -1329,10 +1333,6 @@ public class ClusterImpl implements Cluster {
    * Calculate the effective Cluster Version State based on the state of its hosts.
    *
    * CURRENT: all hosts are CURRENT
-   * UPGRADE_FAILED: at least one host in UPGRADE_FAILED
-   * UPGRADED: all hosts are UPGRADED
-   * UPGRADING: at least one host is UPGRADING, and the rest in UPGRADING|INSTALLED
-   * UPGRADING: at least one host is UPGRADED, and the rest in UPGRADING|INSTALLED
    * INSTALLED: all hosts in INSTALLED
    * INSTALL_FAILED: at least one host in INSTALL_FAILED
    * INSTALLING: all hosts in INSTALLING -or- INSTALLING and NOT_REQUIRED. Notice that if one host is CURRENT and another is INSTALLING, then the
@@ -1354,22 +1354,6 @@ public class ClusterImpl implements Cluster {
     if (stateToHosts.containsKey(RepositoryVersionState.CURRENT) && stateToHosts.get(RepositoryVersionState.CURRENT).size() == totalHosts) {
       return RepositoryVersionState.CURRENT;
     }
-    if (stateToHosts.containsKey(RepositoryVersionState.UPGRADE_FAILED) && !stateToHosts.get(RepositoryVersionState.UPGRADE_FAILED).isEmpty()) {
-      return RepositoryVersionState.UPGRADE_FAILED;
-    }
-    if (stateToHosts.containsKey(RepositoryVersionState.UPGRADED) && stateToHosts.get(RepositoryVersionState.UPGRADED).size() == totalHosts) {
-      return RepositoryVersionState.UPGRADED;
-    }
-    if (stateToHosts.containsKey(RepositoryVersionState.UPGRADING) && !stateToHosts.get(RepositoryVersionState.UPGRADING).isEmpty()) {
-      return RepositoryVersionState.UPGRADING;
-    }
-    if (stateToHosts.containsKey(RepositoryVersionState.UPGRADED)
-        && !stateToHosts.get(RepositoryVersionState.UPGRADED).isEmpty()
-        && stateToHosts.get(RepositoryVersionState.UPGRADED).size() != totalHosts) {
-      // It is possible that a host has transitioned to UPGRADED state even before any other host has transitioned to UPGRADING state.
-      // Example: Host with single component ZOOKEEPER Server on it which is the first component to be upgraded.
-      return RepositoryVersionState.UPGRADING;
-    }
     if (stateToHosts.containsKey(RepositoryVersionState.INSTALLED) && stateToHosts.get(RepositoryVersionState.INSTALLED).size() == totalHosts) {
       return RepositoryVersionState.INSTALLED;
     }
@@ -1404,9 +1388,9 @@ public class ClusterImpl implements Cluster {
       }
     }
 
-    // Also returns when have a mix of CURRENT and INSTALLING|INSTALLED|UPGRADING|UPGRADED
-    LOG.warn("have a mix of CURRENT and INSTALLING|INSTALLED|UPGRADING|UPGRADED host versions, " +
-      "returning OUT_OF_SYNC as cluster version. Host version states: " + stateToHosts.toString());
+    // Also returned when there is a mix of CURRENT and INSTALLING|INSTALLED
+    LOG.warn("Cluster has a mix of CURRENT and INSTALLING|INSTALLED host versions, " +
+      "returning OUT_OF_SYNC as cluster version. Host version states: {}", stateToHosts);
     return RepositoryVersionState.OUT_OF_SYNC;
   }
 
@@ -1444,7 +1428,7 @@ public class ClusterImpl implements Cluster {
               stackId,
               version,
               AuthorizationHelper.getAuthenticatedName(configuration.getAnonymousAuditName()),
-              RepositoryVersionState.UPGRADING);
+              RepositoryVersionState.INSTALLING);
           clusterVersion = clusterVersionDAO.findByClusterAndStackAndVersion(
               getClusterName(), stackId, version);
 
@@ -1461,14 +1445,11 @@ public class ClusterImpl implements Cluster {
           return;
         }
       }
-
       // Ignore if cluster version is CURRENT
       if (clusterVersion.getState() != RepositoryVersionState.INSTALL_FAILED &&
               clusterVersion.getState() != RepositoryVersionState.OUT_OF_SYNC &&
               clusterVersion.getState() != RepositoryVersionState.INSTALLING &&
-              clusterVersion.getState() != RepositoryVersionState.INSTALLED &&
-              clusterVersion.getState() != RepositoryVersionState.UPGRADING &&
-              clusterVersion.getState() != RepositoryVersionState.UPGRADED) {
+              clusterVersion.getState() != RepositoryVersionState.INSTALLED) {
         // anything else is not supported as of now
         return;
       }
@@ -1584,7 +1565,7 @@ public class ClusterImpl implements Cluster {
           // That is an initial bootstrap
           performingInitialBootstrap = true;
         }
-        hostVersionEntity = new HostVersionEntity(host, repositoryVersion, RepositoryVersionState.UPGRADING);
+        hostVersionEntity = new HostVersionEntity(host, repositoryVersion, RepositoryVersionState.INSTALLING);
         hostVersionDAO.create(hostVersionEntity);
       }
 
@@ -1595,22 +1576,17 @@ public class ClusterImpl implements Cluster {
       if (!isCurrentPresent) {
        // Transition from INSTALLING -> CURRENT. This is allowed because the Host Version Entity is bootstrapped in an INSTALLING state.
         // Alternatively, transition to CURRENT during initial bootstrap if at least one host component advertised a version
-        if (hostSummary.isUpgradeFinished() && hostVersionEntity.getState().equals(RepositoryVersionState.UPGRADING) || performingInitialBootstrap) {
+        if (hostSummary.isUpgradeFinished() || performingInitialBootstrap) {
           hostVersionEntity.setState(RepositoryVersionState.CURRENT);
           hostVersionEntity = hostVersionDAO.merge(hostVersionEntity);
         }
       } else {
         // Handle transitions during a Stack Upgrade
+        if (hostSummary.isUpgradeFinished() && hostVersionEntity.getState().equals(RepositoryVersionState.INSTALLED)) {
+          currentVersionEntity.setState(RepositoryVersionState.INSTALLED);
+          hostVersionEntity.setState(RepositoryVersionState.CURRENT);
 
-        // If a host only has one Component to update, that single report can still transition the host version from
-        // INSTALLED->UPGRADING->UPGRADED in one shot.
-        if (hostSummary.isUpgradeInProgress(currentVersionEntity.getRepositoryVersion().getVersion()) && hostVersionEntity.getState().equals(RepositoryVersionState.INSTALLED)) {
-          hostVersionEntity.setState(RepositoryVersionState.UPGRADING);
-          hostVersionEntity = hostVersionDAO.merge(hostVersionEntity);
-        }
-
-        if (hostSummary.isUpgradeFinished() && hostVersionEntity.getState().equals(RepositoryVersionState.UPGRADING)) {
-          hostVersionEntity.setState(RepositoryVersionState.UPGRADED);
+          hostVersionDAO.merge(currentVersionEntity);
           hostVersionEntity = hostVersionDAO.merge(hostVersionEntity);
         }
       }
@@ -1658,16 +1634,8 @@ public class ClusterImpl implements Cluster {
    */
   private void createClusterVersionInternal(StackId stackId, String version,
       String userName, RepositoryVersionState state) throws AmbariException {
-    Set<RepositoryVersionState> allowedStates = new HashSet<RepositoryVersionState>();
-    Collection<ClusterVersionEntity> allClusterVersions = getAllClusterVersions();
-    if (allClusterVersions == null || allClusterVersions.isEmpty()) {
-      allowedStates.add(RepositoryVersionState.UPGRADING);
-    } else {
-      allowedStates.add(RepositoryVersionState.INSTALLING);
-    }
-
-    if (!allowedStates.contains(state)) {
-      throw new AmbariException("The allowed state for a new cluster version must be within " + allowedStates);
+    if (!ALLOWED_REPOSITORY_STATES.contains(state)) {
+      throw new AmbariException("The allowed state for a new cluster version must be within " + ALLOWED_REPOSITORY_STATES);
     }
 
     ClusterVersionEntity existing = clusterVersionDAO.findByClusterAndStackAndVersion(
@@ -1701,10 +1669,10 @@ public class ClusterImpl implements Cluster {
    * following are some of the steps that are taken when transitioning between
    * specific states:
    * <ul>
-   * <li>UPGRADING/UPGRADED --> CURRENT</lki>: Set the current stack to the
+   * <li>INSTALLING/INSTALLED --> CURRENT</li>: Set the current stack to the
    * desired stack, ensure all hosts with the desired stack are CURRENT as well.
    * </ul>
-   * <li>UPGRADING/UPGRADED --> CURRENT</lki>: Set the current stack to the
+   * <li>INSTALLING/INSTALLED --> CURRENT</li>: Set the current stack to the
    * desired stack. </ul>
    *
    * @param stackId
@@ -1751,31 +1719,21 @@ public class ClusterImpl implements Cluster {
             allowedStates.add(RepositoryVersionState.INSTALLED);
             allowedStates.add(RepositoryVersionState.INSTALL_FAILED);
             allowedStates.add(RepositoryVersionState.OUT_OF_SYNC);
+            if (clusterVersionDAO.findByClusterAndStateCurrent(getClusterName()) == null) {
+              allowedStates.add(RepositoryVersionState.CURRENT);
+            }
             break;
           case INSTALL_FAILED:
             allowedStates.add(RepositoryVersionState.INSTALLING);
             break;
           case INSTALLED:
             allowedStates.add(RepositoryVersionState.INSTALLING);
-            allowedStates.add(RepositoryVersionState.UPGRADING);
             allowedStates.add(RepositoryVersionState.OUT_OF_SYNC);
+            allowedStates.add(RepositoryVersionState.CURRENT);
             break;
           case OUT_OF_SYNC:
             allowedStates.add(RepositoryVersionState.INSTALLING);
             break;
-          case UPGRADING:
-            allowedStates.add(RepositoryVersionState.UPGRADED);
-            allowedStates.add(RepositoryVersionState.UPGRADE_FAILED);
-            if (clusterVersionDAO.findByClusterAndStateCurrent(getClusterName()) == null) {
-              allowedStates.add(RepositoryVersionState.CURRENT);
-            }
-            break;
-          case UPGRADED:
-            allowedStates.add(RepositoryVersionState.CURRENT);
-            break;
-          case UPGRADE_FAILED:
-            allowedStates.add(RepositoryVersionState.UPGRADING);
-            break;
         }
 
         if (!allowedStates.contains(state)) {
@@ -3465,4 +3423,45 @@ public class ClusterImpl implements Cluster {
   private ClusterEntity getClusterEntity() {
     return clusterDAO.findById(clusterId);
   }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public UpgradeEntity getUpgradeEntity() {
+    clusterGlobalLock.readLock().lock();
+    try {
+      ClusterEntity clusterEntity = getClusterEntity();
+      if (clusterEntity != null) {
+        return clusterEntity.getUpgradeEntity();
+      } else {
+        return null;
+      }
+    } finally {
+      clusterGlobalLock.readLock().unlock();
+    }
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  @Transactional
+  public void setUpgradeEntity(UpgradeEntity upgradeEntity) throws AmbariException {
+    clusterGlobalLock.writeLock().lock();
+    try {
+      ClusterEntity clusterEntity = getClusterEntity();
+      if (clusterEntity != null) {
+        clusterEntity.setUpgradeEntity(upgradeEntity);
+        clusterDAO.merge(clusterEntity);
+      }
+    } catch (RollbackException e) {
+      String msg = "Unable to set upgrade entity " + upgradeEntity + " for cluster "
+        + getClusterName();
+      LOG.warn(msg);
+      throw new AmbariException(msg, e);
+    } finally {
+      clusterGlobalLock.writeLock().unlock();
+    }
+  }
 }

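With the UPGRADING/UPGRADED/UPGRADE_FAILED branches removed, the effective cluster version described in the calculateClusterVersionState javadoc reduces to a handful of cases: all hosts CURRENT, all INSTALLED, any INSTALL_FAILED, all INSTALLING (possibly mixed with NOT_REQUIRED), and OUT_OF_SYNC for any other mix. The sketch below expresses that reduced calculation under the assumption that the checks apply in the order listed; the enum and class are stand-ins, not the Ambari code.

import java.util.EnumSet;
import java.util.List;

public class ClusterVersionStateSketch {

  enum RepoState { INSTALLING, INSTALL_FAILED, INSTALLED, OUT_OF_SYNC, NOT_REQUIRED, CURRENT }

  static RepoState effectiveState(List<RepoState> hostStates) {
    if (hostStates.stream().allMatch(s -> s == RepoState.CURRENT)) {
      return RepoState.CURRENT;
    }
    if (hostStates.stream().allMatch(s -> s == RepoState.INSTALLED)) {
      return RepoState.INSTALLED;
    }
    if (hostStates.contains(RepoState.INSTALL_FAILED)) {
      return RepoState.INSTALL_FAILED;
    }
    EnumSet<RepoState> installingOrSkipped = EnumSet.of(RepoState.INSTALLING, RepoState.NOT_REQUIRED);
    if (hostStates.stream().allMatch(installingOrSkipped::contains)) {
      return RepoState.INSTALLING;
    }
    // Any other mix (for example CURRENT alongside INSTALLING|INSTALLED) is reported as out of sync.
    return RepoState.OUT_OF_SYNC;
  }

  public static void main(String[] args) {
    System.out.println(effectiveState(List.of(RepoState.CURRENT, RepoState.INSTALLED)));   // OUT_OF_SYNC
    System.out.println(effectiveState(List.of(RepoState.INSTALLED, RepoState.INSTALLED))); // INSTALLED
  }
}
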
http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
index 92828af..2b926b4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
@@ -1790,8 +1790,11 @@ public class ServiceComponentHostImpl implements ServiceComponentHost {
   public RepositoryVersionEntity recalculateHostVersionState() throws AmbariException {
     RepositoryVersionEntity repositoryVersion = null;
     String version = getVersion();
-    if (version == null || version.isEmpty() || version.equalsIgnoreCase(State.UNKNOWN.toString())) {
-      // Recalculate only if some particular version is set
+    if (getUpgradeState().equals(UpgradeState.IN_PROGRESS) ||
+      getUpgradeState().equals(UpgradeState.VERSION_MISMATCH) ||
+        State.UNKNOWN.toString().equals(version)) {
+      // TODO: we still recalculate the host version if an upgrading component failed; that seems to be OK
+      // Recalculate only when no upgrade is in progress and there is no version mismatch
       return null;
     }
 

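The new guard in recalculateHostVersionState skips recalculation while a component is still mid-upgrade or has reported a mismatched version, but, as the TODO notes, a FAILED component still triggers recalculation. A tiny sketch of that guard as a pure function follows; the stand-in enum and the literal "UNKNOWN" value are assumptions made for illustration.

public class RecalcGuardSketch {

  enum UpgradeState { NONE, IN_PROGRESS, COMPLETE, FAILED, VERSION_MISMATCH }

  /** True when the host version should not be recalculated yet. */
  static boolean skipRecalculation(UpgradeState upgradeState, String reportedVersion) {
    return upgradeState == UpgradeState.IN_PROGRESS
        || upgradeState == UpgradeState.VERSION_MISMATCH
        || "UNKNOWN".equals(reportedVersion);
  }

  public static void main(String[] args) {
    System.out.println(skipRecalculation(UpgradeState.FAILED, "2.3.0.0-1234"));      // false: still recalculated
    System.out.println(skipRecalculation(UpgradeState.IN_PROGRESS, "2.3.0.0-1234")); // true: wait for the upgrade
  }
}
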
http://git-wip-us.apache.org/repos/asf/ambari/blob/cbef0c14/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostSummary.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostSummary.java b/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostSummary.java
index 1c36143..bccdb25 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostSummary.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostSummary.java
@@ -23,10 +23,11 @@ import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.orm.entities.HostComponentStateEntity;
 import org.apache.ambari.server.orm.entities.HostEntity;
+import org.apache.ambari.server.orm.entities.UpgradeEntity;
 import org.apache.ambari.server.state.ComponentInfo;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.State;
-import org.apache.commons.lang.StringUtils;
+import org.apache.ambari.server.state.UpgradeState;
 
 import java.util.Collection;
 import java.util.HashSet;
@@ -36,7 +37,7 @@ import java.util.Set;
 /**
  * Represents a summary of the versions of the components installed on a host.
  */
-public class ServiceComponentHostSummary  {
+public class ServiceComponentHostSummary {
 
   private Collection<HostComponentStateEntity> allHostComponents;
   private Collection<HostComponentStateEntity> haveAdvertisedVersion;
@@ -44,15 +45,14 @@ public class ServiceComponentHostSummary  {
   private Collection<HostComponentStateEntity> noVersionToAdvertise;
   private Set<String> versions;
 
-
   public ServiceComponentHostSummary(AmbariMetaInfo ambariMetaInfo, HostEntity host, String stackName, String stackVersion) throws AmbariException {
     allHostComponents = host.getHostComponentStateEntities();
-    haveAdvertisedVersion = new HashSet<HostComponentStateEntity>();
-    waitingToAdvertiseVersion = new HashSet<HostComponentStateEntity>();
-    noVersionToAdvertise = new HashSet<HostComponentStateEntity>();
-    versions = new HashSet<String>();
+    haveAdvertisedVersion = new HashSet<>();
+    waitingToAdvertiseVersion = new HashSet<>();
+    noVersionToAdvertise = new HashSet<>();
+    versions = new HashSet<>();
 
-    for (HostComponentStateEntity hostComponentStateEntity: allHostComponents) {
+    for (HostComponentStateEntity hostComponentStateEntity : allHostComponents) {
       ComponentInfo compInfo = ambariMetaInfo.getComponent(
           stackName, stackVersion, hostComponentStateEntity.getServiceName(),
           hostComponentStateEntity.getComponentName());
@@ -61,12 +61,13 @@ public class ServiceComponentHostSummary  {
         // Some Components cannot advertise a version. E.g., ZKF, AMBARI_METRICS, Kerberos
         noVersionToAdvertise.add(hostComponentStateEntity);
       } else {
-        if (hostComponentStateEntity.getVersion() == null || hostComponentStateEntity.getVersion().isEmpty() || hostComponentStateEntity.getVersion().equalsIgnoreCase(State.UNKNOWN.toString())) {
+        if (hostComponentStateEntity.getUpgradeState().equals(UpgradeState.IN_PROGRESS) ||
+            hostComponentStateEntity.getVersion().equalsIgnoreCase(State.UNKNOWN.toString())) {
           waitingToAdvertiseVersion.add(hostComponentStateEntity);
         } else {
           haveAdvertisedVersion.add(hostComponentStateEntity);
           versions.add(hostComponentStateEntity.getVersion());
-        }
+        } // TODO: what if component reported wrong version?
       }
     }
   }
@@ -80,20 +81,20 @@ public class ServiceComponentHostSummary  {
   }
 
   public boolean isUpgradeFinished() {
-    return haveAllComponentsFinishedAdvertisingVersion() && haveSameVersion(getHaveAdvertisedVersion());
+    return haveAllComponentsFinishedAdvertisingVersion() && noComponentVersionMismatches(getHaveAdvertisedVersion());
   }
 
   /**
-   * @param currentRepoVersion Repo Version that is CURRENT for this host
+   * @param upgradeEntity Upgrade info about the update on the given host
    * @return Return true if multiple component versions are found for this host, or if it does not coincide with the
    * CURRENT repo version.
    */
-  public boolean isUpgradeInProgress(String currentRepoVersion) {
+  public boolean isUpgradeInProgress(UpgradeEntity upgradeEntity) {
     // Exactly one CURRENT version must exist
     // We can only detect an upgrade if the Host has at least one component that advertises a version and has done so already
     // If distinct versions have been advertises, then an upgrade is in progress.
     // If exactly one version has been advertises, but it doesn't coincide with the CURRENT HostVersion, then an upgrade is in progress.
-    return currentRepoVersion != null && (versions.size() > 1 || (versions.size() == 1 && !versions.iterator().next().equals(currentRepoVersion)));
+    return upgradeEntity != null;
   }
 
   /**
@@ -101,30 +102,20 @@ public class ServiceComponentHostSummary  {
    * @return Return a bool indicating if all components that can report a version have done so.
    */
   public boolean haveAllComponentsFinishedAdvertisingVersion() {
-    return waitingToAdvertiseVersion.size() == 0;
+    return waitingToAdvertiseVersion.isEmpty();
   }
 
   /**
-   * Checks that every component has the same version
+   * Checks that every component has actually advertised its version (in other words, we are not
+   * waiting for any version to be advertised), and that no version mismatch occurred.
    *
    * @param hostComponents host components
-   * @return true if components have the same version, or collection is empty, false otherwise.
+   * @return true if no component is in a state where its new version is still unadvertised or mismatched (or the collection is empty), false otherwise.
    */
-  public static boolean haveSameVersion(Collection<HostComponentStateEntity> hostComponents) {
-    // It is important to return true even if the collection is empty because technically, there are no conflicts.
-    if (hostComponents.isEmpty()) {
-      return true;
-    }
-    String firstVersion = null;
+  public static boolean noComponentVersionMismatches(Collection<HostComponentStateEntity> hostComponents) {
     for (HostComponentStateEntity hostComponent : hostComponents) {
-      if (!hostComponent.getVersion().isEmpty()) {
-        if (firstVersion == null) {
-          firstVersion = hostComponent.getVersion();
-        } else {
-          if (!StringUtils.equals(firstVersion, hostComponent.getVersion())) {
-            return false;
-          }
-        }
+      if (UpgradeState.VERSION_NON_ADVERTISED_STATES.contains(hostComponent.getUpgradeState())) {
+        return false;
       }
     }
     return true;


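The reworked summary no longer compares raw version strings; it asks whether every advertising component has finished reporting and whether any of them sits in one of the VERSION_NON_ADVERTISED_STATES. The compact sketch below shows that composition with local stand-ins; ComponentState and the stillWaiting counter are simplifications of the entity collections used above, not the Ambari types.

import java.util.EnumSet;
import java.util.List;

public class HostSummarySketch {

  enum UpgradeState { NONE, IN_PROGRESS, COMPLETE, FAILED, VERSION_MISMATCH }

  static final EnumSet<UpgradeState> VERSION_NON_ADVERTISED_STATES =
      EnumSet.of(UpgradeState.IN_PROGRESS, UpgradeState.FAILED, UpgradeState.VERSION_MISMATCH);

  record ComponentState(String name, UpgradeState upgradeState) {}

  /** No component is stuck in a state where its new version is unknown or wrong. */
  static boolean noComponentVersionMismatches(List<ComponentState> advertised) {
    return advertised.stream().noneMatch(c -> VERSION_NON_ADVERTISED_STATES.contains(c.upgradeState()));
  }

  /** Every advertising component has reported, and none of them is unfinished or mismatched. */
  static boolean isUpgradeFinished(List<ComponentState> advertised, int stillWaiting) {
    return stillWaiting == 0 && noComponentVersionMismatches(advertised);
  }

  public static void main(String[] args) {
    List<ComponentState> advertised = List.of(
        new ComponentState("NAMENODE", UpgradeState.COMPLETE),
        new ComponentState("DATANODE", UpgradeState.VERSION_MISMATCH));
    System.out.println(isUpgradeFinished(advertised, 0)); // false: DATANODE reported a wrong version
  }
}
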
[47/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e06d95d1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e06d95d1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e06d95d1

Branch: refs/heads/trunk
Commit: e06d95d1b779da2762e46de6c8d72a3523498401
Parents: cbef0c1 083ac6d
Author: Nate Cole <nc...@hortonworks.com>
Authored: Tue Feb 23 16:21:20 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Tue Feb 23 16:21:20 2016 -0500

----------------------------------------------------------------------
 .../loginActivities/LoginMessageMainCtrl.js     |   51 +-
 .../ui/admin-web/app/scripts/i18n.config.js     |    4 +-
 .../app/views/loginActivities/loginMessage.html |   14 +-
 .../src/main/repo/install_ambari_tarball.py     |    2 +-
 ambari-server/conf/unix/ca.config               |    3 +-
 ambari-server/conf/unix/install-helper.sh       |   28 +-
 ambari-server/conf/unix/log4j.properties        |    3 +-
 ambari-server/pom.xml                           |    1 +
 .../ambari/server/agent/HeartBeatHandler.java   |  550 +------
 .../ambari/server/agent/HeartbeatMonitor.java   |    6 +
 .../ambari/server/agent/HeartbeatProcessor.java |  773 +++++++++
 .../api/services/ActiveWidgetLayoutService.java |   10 +-
 .../api/services/UserAuthorizationService.java  |    4 +-
 .../api/services/UserPrivilegeService.java      |    3 +-
 .../ambari/server/api/services/UserService.java |    3 +-
 .../server/configuration/Configuration.java     |   23 +-
 .../ambari/server/controller/AmbariServer.java  |    6 +-
 .../ambari/server/orm/dao/HostVersionDAO.java   |   78 +-
 .../server/orm/entities/HostVersionEntity.java  |    9 +
 .../encryption/MasterKeyServiceImpl.java        |    3 +-
 .../server/state/cluster/ClusterImpl.java       |    6 +-
 .../svccomphost/ServiceComponentHostImpl.java   |   72 +-
 .../apache/ambari/server/utils/AmbariPath.java  |   39 +
 .../src/main/package/deb/control/postinst       |    2 +-
 .../src/main/package/deb/control/preinst        |   22 +-
 .../src/main/package/deb/control/prerm          |    2 +-
 .../src/main/package/rpm/postinstall.sh         |   10 +-
 .../src/main/package/rpm/posttrans_server.sh    |   10 +-
 .../src/main/package/rpm/preinstall.sh          |   19 +-
 ambari-server/src/main/package/rpm/preremove.sh |    6 +-
 .../server/agent/HeartbeatProcessorTest.java    | 1290 +++++++++++++++
 .../server/agent/HeartbeatTestHelper.java       |  229 +++
 .../server/agent/TestHeartbeatHandler.java      | 1488 ++----------------
 .../services/ActiveWidgetLayoutServiceTest.java |   76 +
 .../services/UserAuthorizationServiceTest.java  |   12 +
 .../api/services/UserPrivilegeServiceTest.java  |   13 +
 .../server/api/services/UserServiceTest.java    |   71 +
 .../resourceManager/step3_controller.js         |    2 +-
 .../main/admin/kerberos/step4_controller.js     |    3 +-
 .../main/admin/serviceAccounts_controller.js    |    5 +-
 .../controllers/main/service/info/configs.js    |   32 +-
 ambari-web/app/controllers/wizard.js            |    1 -
 .../app/controllers/wizard/step7_controller.js  |   66 +-
 .../configs/stack_config_properties_mapper.js   |    4 +-
 .../common/kdc_credentials_controller_mixin.js  |    2 +-
 ambari-web/app/models/stack_service.js          |   15 +-
 ambari-web/app/router.js                        |   11 +-
 ambari-web/app/utils.js                         |    1 +
 ambari-web/app/utils/config.js                  |  187 +--
 ambari-web/app/utils/configs/theme/theme.js     |  103 ++
 .../configs/widgets/config_widget_view.js       |    2 +-
 .../admin/kerberos/step4_controller_test.js     |    9 +-
 .../test/controllers/wizard/step7_test.js       |    7 +-
 ambari-web/test/utils/config_test.js            |  126 +-
 54 files changed, 3149 insertions(+), 2368 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e06d95d1/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/e06d95d1/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
index a4136ee,ba14446..a13b421
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
@@@ -339,448 -286,7 +286,6 @@@ public class HeartBeatHandler 
      host.setRecoveryReport(recoveryReport);
    }
  
-   protected void processHostStatus(HeartBeat heartbeat, String hostname) throws AmbariException {
- 
-     Host host = clusterFsm.getHost(hostname);
-     HealthStatus healthStatus = host.getHealthStatus().getHealthStatus();
- 
-     if (!healthStatus.equals(HostHealthStatus.HealthStatus.UNKNOWN)) {
- 
-       List<ComponentStatus> componentStatuses = heartbeat.getComponentStatus();
-       //Host status info could be calculated only if agent returned statuses in heartbeat
-       //Or, if a command is executed that can change component status
-       boolean calculateHostStatus = false;
-       String clusterName = null;
-       if (componentStatuses.size() > 0) {
-         calculateHostStatus = true;
-         for (ComponentStatus componentStatus : componentStatuses) {
-           clusterName = componentStatus.getClusterName();
-           break;
-         }
-       }
- 
-       if (!calculateHostStatus) {
-         List<CommandReport> reports = heartbeat.getReports();
-         for (CommandReport report : reports) {
-           if (RoleCommand.ACTIONEXECUTE.toString().equals(report.getRoleCommand())) {
-             continue;
-           }
- 
-           String service = report.getServiceName();
-           if (actionMetadata.getActions(service.toLowerCase()).contains(report.getRole())) {
-             continue;
-           }
-           if (report.getStatus().equals("COMPLETED")) {
-             calculateHostStatus = true;
-             clusterName = report.getClusterName();
-             break;
-           }
-         }
-       }
- 
-       if (calculateHostStatus) {
-         //Use actual component status to compute the host status
-         int masterCount = 0;
-         int mastersRunning = 0;
-         int slaveCount = 0;
-         int slavesRunning = 0;
- 
-         StackId stackId;
-         Cluster cluster = clusterFsm.getCluster(clusterName);
-         stackId = cluster.getDesiredStackVersion();
- 
-         MaintenanceStateHelper psh = injector.getInstance(MaintenanceStateHelper.class);
- 
-         List<ServiceComponentHost> scHosts = cluster.getServiceComponentHosts(heartbeat.getHostname());
-         for (ServiceComponentHost scHost : scHosts) {
-           ComponentInfo componentInfo =
-               ambariMetaInfo.getComponent(stackId.getStackName(),
-                   stackId.getStackVersion(), scHost.getServiceName(),
-                   scHost.getServiceComponentName());
- 
-           String status = scHost.getState().name();
- 
-           String category = componentInfo.getCategory();
- 
-           if (MaintenanceState.OFF == psh.getEffectiveState(scHost, host)) {
-             if (category.equals("MASTER")) {
-               ++masterCount;
-               if (status.equals("STARTED")) {
-                 ++mastersRunning;
-               }
-             } else if (category.equals("SLAVE")) {
-               ++slaveCount;
-               if (status.equals("STARTED")) {
-                 ++slavesRunning;
-               }
-             }
-           }
-         }
- 
-         if (masterCount == mastersRunning && slaveCount == slavesRunning) {
-           healthStatus = HealthStatus.HEALTHY;
-         } else if (masterCount > 0 && mastersRunning < masterCount) {
-           healthStatus = HealthStatus.UNHEALTHY;
-         } else {
-           healthStatus = HealthStatus.ALERT;
-         }
- 
-         host.setStatus(healthStatus.name());
-         host.persist();
-       }
- 
-       //If host doesn't belong to any cluster
-       if ((clusterFsm.getClustersForHost(host.getHostName())).size() == 0) {
-         healthStatus = HealthStatus.HEALTHY;
-         host.setStatus(healthStatus.name());
-         host.persist();
-       }
-     }
-   }
- 
-   protected void processCommandReports(
-       HeartBeat heartbeat, String hostname, Clusters clusterFsm, long now)
-       throws AmbariException {
-     List<CommandReport> reports = heartbeat.getReports();
- 
-     // Cache HostRoleCommand entities because we will need them few times
-     List<Long> taskIds = new ArrayList<Long>();
-     for (CommandReport report : reports) {
-       taskIds.add(report.getTaskId());
-     }
-     Collection<HostRoleCommand> commands = actionManager.getTasks(taskIds);
- 
-     Iterator<HostRoleCommand> hostRoleCommandIterator = commands.iterator();
-     for (CommandReport report : reports) {
- 
-       Long clusterId = null;
-       if (report.getClusterName() != null) {
-         try {
-           Cluster cluster = clusterFsm.getCluster(report.getClusterName());
-           clusterId = Long.valueOf(cluster.getClusterId());
-         } catch (AmbariException e) {
-         }
-       }
- 
-       LOG.debug("Received command report: " + report);
-       // Fetch HostRoleCommand that corresponds to a given task ID
-       HostRoleCommand hostRoleCommand = hostRoleCommandIterator.next();
-       HostEntity hostEntity = hostDAO.findByName(hostname);
-       if (hostEntity == null) {
-         LOG.error("Received a command report and was unable to retrieve HostEntity for hostname = " + hostname);
-         continue;
-       }
- 
-       // Send event for final command reports for actions
-       if (RoleCommand.valueOf(report.getRoleCommand()) == RoleCommand.ACTIONEXECUTE &&
-           HostRoleStatus.valueOf(report.getStatus()).isCompletedState()) {
-         ActionFinalReportReceivedEvent event = new ActionFinalReportReceivedEvent(
-                 clusterId, hostname, report, false);
-         ambariEventPublisher.publish(event);
-       }
- 
-       // Skip sending events for command reports for ABORTed commands
-       if (hostRoleCommand.getStatus() == HostRoleStatus.ABORTED) {
-         continue;
-       }
-       if (hostRoleCommand.getStatus() == HostRoleStatus.QUEUED &&
-               report.getStatus().equals("IN_PROGRESS")) {
-         hostRoleCommand.setStartTime(now);
-       }
- 
-       // If the report indicates the keytab file was successfully transferred to a host or removed
-       // from a host, record this for future reference
-       if (Service.Type.KERBEROS.name().equalsIgnoreCase(report.getServiceName()) &&
-           Role.KERBEROS_CLIENT.name().equalsIgnoreCase(report.getRole()) &&
-           RoleCommand.CUSTOM_COMMAND.name().equalsIgnoreCase(report.getRoleCommand()) &&
-           RequestExecution.Status.COMPLETED.name().equalsIgnoreCase(report.getStatus())) {
- 
-         String customCommand = report.getCustomCommand();
- 
-         boolean adding = "SET_KEYTAB".equalsIgnoreCase(customCommand);
-         if (adding || "REMOVE_KEYTAB".equalsIgnoreCase(customCommand)) {
-           WriteKeytabsStructuredOut writeKeytabsStructuredOut;
-           try {
-             writeKeytabsStructuredOut = gson.fromJson(report.getStructuredOut(), WriteKeytabsStructuredOut.class);
-           } catch (JsonSyntaxException ex) {
-             //Json structure was incorrect do nothing, pass this data further for processing
-             writeKeytabsStructuredOut = null;
-           }
- 
-           if (writeKeytabsStructuredOut != null) {
-             Map<String, String> keytabs = writeKeytabsStructuredOut.getKeytabs();
-             if (keytabs != null) {
-               for (Map.Entry<String, String> entry : keytabs.entrySet()) {
-                 String principal = entry.getKey();
-                 if (!kerberosPrincipalHostDAO.exists(principal, hostEntity.getHostId())) {
-                   if (adding) {
-                     kerberosPrincipalHostDAO.create(principal, hostEntity.getHostId());
-                   } else if ("_REMOVED_".equalsIgnoreCase(entry.getValue())) {
-                     kerberosPrincipalHostDAO.remove(principal, hostEntity.getHostId());
-                   }
-                 }
-               }
-             }
-           }
-         }
-       }
- 
-       //pass custom START, STOP and RESTART
-       if (RoleCommand.ACTIONEXECUTE.toString().equals(report.getRoleCommand()) ||
-          (RoleCommand.CUSTOM_COMMAND.toString().equals(report.getRoleCommand()) &&
-          !("RESTART".equals(report.getCustomCommand()) ||
-          "START".equals(report.getCustomCommand()) ||
-          "STOP".equals(report.getCustomCommand())))) {
-         continue;
-       }
- 
-       Cluster cl = clusterFsm.getCluster(report.getClusterName());
-       String service = report.getServiceName();
-       if (service == null || service.isEmpty()) {
-         throw new AmbariException("Invalid command report, service: " + service);
-       }
-       if (actionMetadata.getActions(service.toLowerCase()).contains(report.getRole())) {
-         LOG.debug(report.getRole() + " is an action - skip component lookup");
-       } else {
-         try {
-           Service svc = cl.getService(service);
-           ServiceComponent svcComp = svc.getServiceComponent(report.getRole());
-           ServiceComponentHost scHost = svcComp.getServiceComponentHost(hostname);
-           String schName = scHost.getServiceComponentName();
- 
-           if (report.getStatus().equals(HostRoleStatus.COMPLETED.toString())) {
- 
-             // Reading component version if it is present
-             if (StringUtils.isNotBlank(report.getStructuredOut())) {
-               ComponentVersionStructuredOut structuredOutput = null;
-               try {
-                 structuredOutput = gson.fromJson(report.getStructuredOut(), ComponentVersionStructuredOut.class);
-               } catch (JsonSyntaxException ex) {
-                 //Json structure for component version was incorrect
-                 //do nothing, pass this data further for processing
-               }
- 
-               String newVersion = structuredOutput == null ? null : structuredOutput.version;
- 
-               HostComponentVersionAdvertisedEvent event = new HostComponentVersionAdvertisedEvent(cl, scHost, newVersion);
-               versionEventPublisher.publish(event);
-             }
- 
-             // Updating stack version, if needed (this is not actually for express/rolling upgrades!)
-             if (scHost.getState().equals(State.UPGRADING)) {
-               scHost.setStackVersion(scHost.getDesiredStackVersion());
-             } else if ((report.getRoleCommand().equals(RoleCommand.START.toString()) ||
-                 (report.getRoleCommand().equals(RoleCommand.CUSTOM_COMMAND.toString()) &&
-                     ("START".equals(report.getCustomCommand()) ||
-                     "RESTART".equals(report.getCustomCommand()))))
-                 && null != report.getConfigurationTags()
-                 && !report.getConfigurationTags().isEmpty()) {
-               LOG.info("Updating applied config on service " + scHost.getServiceName() +
-                 ", component " + scHost.getServiceComponentName() + ", host " + scHost.getHostName());
-               scHost.updateActualConfigs(report.getConfigurationTags());
-               scHost.setRestartRequired(false);
-             }
-             // Necessary for resetting clients stale configs after starting service
-             if ((RoleCommand.INSTALL.toString().equals(report.getRoleCommand()) ||
-                 (RoleCommand.CUSTOM_COMMAND.toString().equals(report.getRoleCommand()) &&
-                 "INSTALL".equals(report.getCustomCommand()))) && svcComp.isClientComponent()){
-               scHost.updateActualConfigs(report.getConfigurationTags());
-               scHost.setRestartRequired(false);
-             }
-             if (RoleCommand.CUSTOM_COMMAND.toString().equals(report.getRoleCommand()) &&
-                 !("START".equals(report.getCustomCommand()) ||
-                  "STOP".equals(report.getCustomCommand()))) {
-               //do not affect states for custom commands except START and STOP
-               //lets status commands to be responsible for this
-               continue;
-             }
- 
-             if (RoleCommand.START.toString().equals(report.getRoleCommand()) ||
-                 (RoleCommand.CUSTOM_COMMAND.toString().equals(report.getRoleCommand()) &&
-                     "START".equals(report.getCustomCommand()))) {
-               scHost.handleEvent(new ServiceComponentHostStartedEvent(schName,
-                   hostname, now));
-               scHost.setRestartRequired(false);
-             } else if (RoleCommand.STOP.toString().equals(report.getRoleCommand()) ||
-                 (RoleCommand.CUSTOM_COMMAND.toString().equals(report.getRoleCommand()) &&
-                     "STOP".equals(report.getCustomCommand()))) {
-               scHost.handleEvent(new ServiceComponentHostStoppedEvent(schName,
-                   hostname, now));
-             } else {
-               scHost.handleEvent(new ServiceComponentHostOpSucceededEvent(schName,
-                   hostname, now));
-             }
-           } else if (report.getStatus().equals("FAILED")) {
- 
-             if (StringUtils.isNotBlank(report.getStructuredOut())) {
-               try {
-                 ComponentVersionStructuredOut structuredOutput = gson.fromJson(report.getStructuredOut(), ComponentVersionStructuredOut.class);
- 
-                 if (null != structuredOutput.upgradeDirection) {
-                   // TODO: backward compatibility: now state is set to FAILED also during downgrade
-                   scHost.setUpgradeState(UpgradeState.FAILED);
-                 }
-               } catch (JsonSyntaxException ex) {
-                 LOG.warn("Structured output was found, but not parseable: {}", report.getStructuredOut());
-               }
-             }
- 
-             LOG.warn("Operation failed - may be retried. Service component host: "
-                 + schName + ", host: " + hostname + " Action id" + report.getActionId());
-             if (actionManager.isInProgressCommand(report)) {
-               scHost.handleEvent(new ServiceComponentHostOpFailedEvent
-                 (schName, hostname, now));
-             } else {
-               LOG.info("Received report for a command that is no longer active. " + report);
-             }
-           } else if (report.getStatus().equals("IN_PROGRESS")) {
-             scHost.handleEvent(new ServiceComponentHostOpInProgressEvent(schName,
-                 hostname, now));
-           }
-         } catch (ServiceComponentNotFoundException scnex) {
-           LOG.warn("Service component not found ", scnex);
-         } catch (InvalidStateTransitionException ex) {
-           if (LOG.isDebugEnabled()) {
-             LOG.warn("State machine exception.", ex);
-           } else {
-             LOG.warn("State machine exception. " + ex.getMessage());
-           }
-         }
-       }
-     }
- 
-     //Update state machines from reports
-     actionManager.processTaskResponse(hostname, reports, commands);
-   }
- 
-   protected void processStatusReports(HeartBeat heartbeat,
-                                       String hostname,
-                                       Clusters clusterFsm)
-       throws AmbariException {
-     Set<Cluster> clusters = clusterFsm.getClustersForHost(hostname);
-     for (Cluster cl : clusters) {
-       for (ComponentStatus status : heartbeat.componentStatus) {
-         if (status.getClusterName().equals(cl.getClusterName())) {
-           try {
-             Service svc = cl.getService(status.getServiceName());
- 
-             String componentName = status.getComponentName();
-             if (svc.getServiceComponents().containsKey(componentName)) {
-               ServiceComponent svcComp = svc.getServiceComponent(
-                   componentName);
-               ServiceComponentHost scHost = svcComp.getServiceComponentHost(
-                   hostname);
-               State prevState = scHost.getState();
-               State liveState = State.valueOf(State.class, status.getStatus());
-               if (prevState.equals(State.INSTALLED)
-                   || prevState.equals(State.STARTED)
-                   || prevState.equals(State.STARTING)
-                   || prevState.equals(State.STOPPING)
-                   || prevState.equals(State.UNKNOWN)) {
-                 scHost.setState(liveState); //TODO direct status set breaks state machine sometimes !!!
-                 if (!prevState.equals(liveState)) {
-                   LOG.info("State of service component " + componentName
-                       + " of service " + status.getServiceName()
-                       + " of cluster " + status.getClusterName()
-                       + " has changed from " + prevState + " to " + liveState
-                       + " at host " + hostname);
-                 }
-               }
- 
-               SecurityState prevSecurityState = scHost.getSecurityState();
-               SecurityState currentSecurityState = SecurityState.valueOf(status.getSecurityState());
-               if((prevSecurityState != currentSecurityState)) {
-                 if(prevSecurityState.isEndpoint()) {
-                   scHost.setSecurityState(currentSecurityState);
-                   LOG.info(String.format("Security of service component %s of service %s of cluster %s " +
-                           "has changed from %s to %s on host %s",
-                       componentName, status.getServiceName(), status.getClusterName(), prevSecurityState,
-                       currentSecurityState, hostname));
-                 }
-                 else {
-                   LOG.debug(String.format("Security of service component %s of service %s of cluster %s " +
-                           "has changed from %s to %s on host %s but will be ignored since %s is a " +
-                           "transitional state",
-                       componentName, status.getServiceName(), status.getClusterName(),
-                       prevSecurityState, currentSecurityState, hostname, prevSecurityState));
-                 }
-               }
- 
-               if (null != status.getStackVersion() && !status.getStackVersion().isEmpty()) {
-                 scHost.setStackVersion(gson.fromJson(status.getStackVersion(), StackId.class));
-               }
- 
-               if (null != status.getConfigTags()) {
-                 scHost.updateActualConfigs(status.getConfigTags());
-               }
- 
-               Map<String, Object> extra = status.getExtra();
-               if (null != extra && !extra.isEmpty()) {
-                 try {
-                   if (extra.containsKey("processes")) {
-                     @SuppressWarnings("unchecked")
-                     List<Map<String, String>> list = (List<Map<String, String>>) extra.get("processes");
-                     scHost.setProcesses(list);
-                   }
-                   if (extra.containsKey("version")) {
-                     String version = extra.get("version").toString();
- 
-                     HostComponentVersionAdvertisedEvent event = new HostComponentVersionAdvertisedEvent(cl, scHost, version);
-                     versionEventPublisher.publish(event);
-                   }
- 
-                 } catch (Exception e) {
-                   LOG.error("Could not access extra JSON for " +
-                       scHost.getServiceComponentName() + " from " +
-                       scHost.getHostName() + ": " + status.getExtra() +
-                       " (" + e.getMessage() + ")");
-                 }
-               }
- 
-               this.heartbeatMonitor.getAgentRequests()
-                   .setExecutionDetailsRequest(hostname, componentName, status.getSendExecCmdDet());
-             } else {
-               // TODO: What should be done otherwise?
-             }
-           } catch (ServiceNotFoundException e) {
-             LOG.warn("Received a live status update for a non-initialized"
-                 + " service"
-                 + ", clusterName=" + status.getClusterName()
-                 + ", serviceName=" + status.getServiceName());
-             // FIXME ignore invalid live update and continue for now?
-             continue;
-           } catch (ServiceComponentNotFoundException e) {
-             LOG.warn("Received a live status update for a non-initialized"
-                 + " servicecomponent"
-                 + ", clusterName=" + status.getClusterName()
-                 + ", serviceName=" + status.getServiceName()
-                 + ", componentName=" + status.getComponentName());
-             // FIXME ignore invalid live update and continue for now?
-             continue;
-           } catch (ServiceComponentHostNotFoundException e) {
-             LOG.warn("Received a live status update for a non-initialized"
-                 + " service"
-                 + ", clusterName=" + status.getClusterName()
-                 + ", serviceName=" + status.getServiceName()
-                 + ", componentName=" + status.getComponentName()
-                 + ", hostname=" + hostname);
-             // FIXME ignore invalid live update and continue for now?
-             continue;
-           } catch (RuntimeException e) {
-             LOG.warn("Received a live status with invalid payload"
-                 + " service"
-                 + ", clusterName=" + status.getClusterName()
-                 + ", serviceName=" + status.getServiceName()
-                 + ", componentName=" + status.getComponentName()
-                 + ", hostname=" + hostname
-                 + ", error=" + e.getMessage());
-             continue;
-           }
-         }
-       }
-     }
-   }
--
    /**
     * Adds commands from action queue to a heartbeat response.
     */

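For orientation while reading this merge: the heartbeat report-processing block deleted above is the path that turns agent CommandReports into service-component-host lifecycle events and, when a report's structured output advertises a component version, publishes a HostComponentVersionAdvertisedEvent. Below is a minimal, self-contained sketch of that dispatch pattern, not Ambari's actual API: VersionOut, LifecycleEvent, toEvent and parseAdvertisedVersion are hypothetical stand-ins introduced only for illustration; what mirrors the deleted code is the lenient Gson parsing of structured output and the START/STOP/custom-command mapping.

    // Hypothetical, simplified model of the dispatch performed by the deleted
    // report-processing code above. Not Ambari API; all names are illustrative.
    import com.google.gson.Gson;
    import com.google.gson.JsonSyntaxException;

    public class ReportDispatchSketch {

      // Stand-in for the "version" field read from a report's structured output.
      static class VersionOut {
        String version;
      }

      enum LifecycleEvent { STARTED, STOPPED, OP_SUCCEEDED, IGNORED }

      private static final Gson GSON = new Gson();

      // Lenient parse: malformed structured output yields null instead of
      // failing the report, mirroring the JsonSyntaxException catch above.
      static String parseAdvertisedVersion(String structuredOut) {
        if (structuredOut == null || structuredOut.trim().isEmpty()) {
          return null;
        }
        try {
          VersionOut out = GSON.fromJson(structuredOut, VersionOut.class);
          return out == null ? null : out.version;
        } catch (JsonSyntaxException ex) {
          return null; // bad JSON is ignored, not fatal
        }
      }

      // Maps a completed report to a lifecycle event, treating custom
      // START/STOP commands like their first-class counterparts, and leaving
      // the state machine alone for any other custom command.
      static LifecycleEvent toEvent(String roleCommand, String customCommand) {
        boolean custom = "CUSTOM_COMMAND".equals(roleCommand);
        if ("START".equals(roleCommand) || (custom && "START".equals(customCommand))) {
          return LifecycleEvent.STARTED;
        }
        if ("STOP".equals(roleCommand) || (custom && "STOP".equals(customCommand))) {
          return LifecycleEvent.STOPPED;
        }
        if (custom) {
          return LifecycleEvent.IGNORED; // e.g. RESTART: status commands own the state
        }
        return LifecycleEvent.OP_SUCCEEDED;
      }

      public static void main(String[] args) {
        System.out.println(parseAdvertisedVersion("{\"version\":\"2.3.4.0-1234\"}")); // 2.3.4.0-1234
        System.out.println(parseAdvertisedVersion("not json"));                       // null
        System.out.println(toEvent("CUSTOM_COMMAND", "RESTART"));                     // IGNORED
        System.out.println(toEvent("STOP", null));                                    // STOPPED
      }
    }

The sketch only covers the COMPLETED branch; the FAILED and IN_PROGRESS branches of the deleted code map onto op-failed and op-in-progress events in the same style.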
http://git-wip-us.apache.org/repos/asf/ambari/blob/e06d95d1/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index 221b83d,1cb935b..c63d043
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@@ -43,9 -34,10 +43,10 @@@ import org.apache.ambari.server.orm.ent
  import org.apache.ambari.server.security.ClientSecurityType;
  import org.apache.ambari.server.security.authorization.LdapServerProperties;
  import org.apache.ambari.server.security.authorization.jwt.JwtAuthenticationProperties;
 +import org.apache.ambari.server.security.encryption.CertificateUtils;
  import org.apache.ambari.server.security.encryption.CredentialProvider;
  import org.apache.ambari.server.state.stack.OsFamily;
 -import org.apache.ambari.server.security.encryption.CertificateUtils;
+ import org.apache.ambari.server.utils.AmbariPath;
  import org.apache.ambari.server.utils.Parallel;
  import org.apache.ambari.server.utils.ShellCommandUtil;
  import org.apache.commons.io.FileUtils;

http://git-wip-us.apache.org/repos/asf/ambari/blob/e06d95d1/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/e06d95d1/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/e06d95d1/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
----------------------------------------------------------------------
diff --cc ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
index b6d51af,c62352a..e29e23e
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
@@@ -2366,160 -1394,7 +1394,6 @@@ public class TestHeartbeatHandler 
      return componentStatus1;
    }
  
-   private HeartBeatHandler getHeartBeatHandler(ActionManager am, ActionQueue aq)
-       throws InvalidStateTransitionException, AmbariException {
-     HeartBeatHandler handler = new HeartBeatHandler(clusters, aq, am, injector);
-     Register reg = new Register();
-     HostInfo hi = new HostInfo();
-     hi.setHostName(DummyHostname1);
-     hi.setOS(DummyOs);
-     hi.setOSRelease(DummyOSRelease);
-     reg.setHostname(DummyHostname1);
-     reg.setResponseId(0);
-     reg.setHardwareProfile(hi);
-     reg.setAgentVersion(metaInfo.getServerVersion());
-     handler.handleRegistration(reg);
-     return handler;
-   }
- 
-   private Cluster getDummyCluster()
-       throws AmbariException {
-     StackEntity stackEntity = stackDAO.find(HDP_22_STACK.getStackName(), HDP_22_STACK.getStackVersion());
-     org.junit.Assert.assertNotNull(stackEntity);
- 
-     // Create the cluster
-     ResourceTypeEntity resourceTypeEntity = new ResourceTypeEntity();
-     resourceTypeEntity.setId(ResourceType.CLUSTER.getId());
-     resourceTypeEntity.setName(ResourceType.CLUSTER.name());
-     resourceTypeEntity = resourceTypeDAO.merge(resourceTypeEntity);
- 
-     ResourceEntity resourceEntity = new ResourceEntity();
-     resourceEntity.setResourceType(resourceTypeEntity);
- 
-     ClusterEntity clusterEntity = new ClusterEntity();
-     clusterEntity.setClusterName(DummyCluster);
-     clusterEntity.setClusterInfo("test_cluster_info1");
-     clusterEntity.setResource(resourceEntity);
-     clusterEntity.setDesiredStack(stackEntity);
- 
-     clusterDAO.create(clusterEntity);
- 
-     StackId stackId = new StackId(DummyStackId);
- 
-     Cluster cluster = clusters.getCluster(DummyCluster);
- 
-     cluster.setDesiredStackVersion(stackId);
-     cluster.setCurrentStackVersion(stackId);
-     helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
-     cluster.createClusterVersion(stackId, stackId.getStackVersion(), "admin",
-         RepositoryVersionState.INSTALLING);
- 
-     Set<String> hostNames = new HashSet<String>(){{
-       add(DummyHostname1);
-     }};
- 
-     Map<String, String> hostAttributes = new HashMap<String, String>();
-     hostAttributes.put("os_family", "redhat");
-     hostAttributes.put("os_release_version", "6.3");
- 
-     List<HostEntity> hostEntities = new ArrayList<HostEntity>();
-     for(String hostName : hostNames) {
-       clusters.addHost(hostName);
-       Host host = clusters.getHost(hostName);
-       host.setHostAttributes(hostAttributes);
-       host.persist();
- 
-       HostEntity hostEntity = hostDAO.findByName(hostName);
-       Assert.assertNotNull(hostEntity);
-       hostEntities.add(hostEntity);
-     }
-     clusterEntity.setHostEntities(hostEntities);
-     clusters.mapHostsToCluster(hostNames, DummyCluster);
- 
-     return cluster;
-   }
- 
-   @Test
-   @SuppressWarnings("unchecked")
-   public void testCommandStatusProcesses() throws Exception {
-     Cluster cluster = getDummyCluster();
-     Service hdfs = cluster.addService(HDFS);
-     hdfs.persist();
-     hdfs.addServiceComponent(DATANODE).persist();
-     hdfs.getServiceComponent(DATANODE).addServiceComponentHost(DummyHostname1).persist();
-     hdfs.getServiceComponent(DATANODE).getServiceComponentHost(DummyHostname1).setState(State.STARTED);
- 
-     ActionQueue aq = new ActionQueue();
- 
-     HeartBeat hb = new HeartBeat();
-     hb.setTimestamp(System.currentTimeMillis());
-     hb.setResponseId(0);
-     hb.setHostname(DummyHostname1);
-     hb.setNodeStatus(new HostStatus(Status.HEALTHY, DummyHostStatus));
-     hb.setReports(new ArrayList<CommandReport>());
- 
-     List<Map<String, String>> procs = new ArrayList<Map<String, String>>();
-     Map<String, String> proc1info = new HashMap<String, String>();
-     proc1info.put("name", "a");
-     proc1info.put("status", "RUNNING");
-     procs.add(proc1info);
- 
-     Map<String, String> proc2info = new HashMap<String, String>();
-     proc2info.put("name", "b");
-     proc2info.put("status", "NOT_RUNNING");
-     procs.add(proc2info);
- 
-     Map<String, Object> extra = new HashMap<String, Object>();
-     extra.put("processes", procs);
- 
-     ArrayList<ComponentStatus> componentStatuses = new ArrayList<ComponentStatus>();
-     ComponentStatus componentStatus1 = new ComponentStatus();
-     componentStatus1.setClusterName(DummyCluster);
-     componentStatus1.setServiceName(HDFS);
-     componentStatus1.setMessage(DummyHostStatus);
-     componentStatus1.setStatus(State.STARTED.name());
-     componentStatus1.setSecurityState(SecurityState.UNSECURED.name());
-     componentStatus1.setComponentName(DATANODE);
- 
-     componentStatus1.setExtra(extra);
-     componentStatuses.add(componentStatus1);
-     hb.setComponentStatus(componentStatuses);
- 
-     final HostRoleCommand command = hostRoleCommandFactory.create(DummyHostname1,
-             Role.DATANODE, null, null);
- 
-     ActionManager am = getMockActionManager();
-     expect(am.getTasks(anyObject(List.class))).andReturn(
-             new ArrayList<HostRoleCommand>() {{
-               add(command);
-             }}).anyTimes();
-     replay(am);
- 
-     HeartBeatHandler handler = getHeartBeatHandler(am, aq);
-     handler.handleHeartBeat(hb);
-     ServiceComponentHost sch = hdfs.getServiceComponent(DATANODE).getServiceComponentHost(DummyHostname1);
- 
-     Assert.assertEquals(Integer.valueOf(2), Integer.valueOf(sch.getProcesses().size()));
- 
-     hb = new HeartBeat();
-     hb.setTimestamp(System.currentTimeMillis());
-     hb.setResponseId(1);
-     hb.setHostname(DummyHostname1);
-     hb.setNodeStatus(new HostStatus(Status.HEALTHY, DummyHostStatus));
-     hb.setReports(new ArrayList<CommandReport>());
- 
-     componentStatus1 = new ComponentStatus();
-     componentStatus1.setClusterName(DummyCluster);
-     componentStatus1.setServiceName(HDFS);
-     componentStatus1.setMessage(DummyHostStatus);
-     componentStatus1.setStatus(State.STARTED.name());
-     componentStatus1.setSecurityState(SecurityState.UNSECURED.name());
-     componentStatus1.setComponentName(DATANODE);
-     hb.setComponentStatus(Collections.singletonList(componentStatus1));
- 
-     handler.handleHeartBeat(hb);
-   }
--
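The test deleted just above fed a "processes" entry through a component status's extra map and asserted that both process records landed on the ServiceComponentHost. As a quick stand-alone illustration of that payload shape (plain Java collections only, no Ambari classes; the names and statuses are copied from the deleted test):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ProcessesPayloadSketch {
      public static void main(String[] args) {
        // Same shape the deleted test passed to ComponentStatus.setExtra(...):
        // extra = { "processes": [ { "name": ..., "status": ... }, ... ] }
        List<Map<String, String>> procs = new ArrayList<>();

        Map<String, String> p1 = new HashMap<>();
        p1.put("name", "a");
        p1.put("status", "RUNNING");
        procs.add(p1);

        Map<String, String> p2 = new HashMap<>();
        p2.put("name", "b");
        p2.put("status", "NOT_RUNNING");
        procs.add(p2);

        Map<String, Object> extra = new HashMap<>();
        extra.put("processes", procs);

        // The handler copies this list onto the ServiceComponentHost, which is
        // why the deleted test asserted sch.getProcesses().size() == 2.
        System.out.println(((List<?>) extra.get("processes")).size()); // 2
      }
    }
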
    @Test
    @SuppressWarnings("unchecked")
    public void testCommandStatusProcesses_empty() throws Exception {