You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by jo...@apache.org on 2016/10/05 00:00:02 UTC

[1/8] ambari git commit: AMBARI-18506 Ambari should present message if stack upgrade path is not available

Repository: ambari
Updated Branches:
  refs/heads/branch-feature-AMBARI-18456 23fbfe480 -> 5467ad073


AMBARI-18506 Ambari should present message if stack upgrade path is not available


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/26949484
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/26949484
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/26949484

Branch: refs/heads/branch-feature-AMBARI-18456
Commit: 269494849cad6d0a56dddaf52316e1d84b08d49f
Parents: 98ba7e0
Author: Andrii Tkach <at...@apache.org>
Authored: Fri Sep 30 17:04:30 2016 +0300
Committer: Andrii Tkach <at...@apache.org>
Committed: Tue Oct 4 17:11:46 2016 +0300

----------------------------------------------------------------------
 ambari-web/app/assets/test/tests.js             |  1 +
 .../main/admin/stack_and_upgrade_controller.js  | 45 +++++++++---
 .../app/mappers/repository_version_mapper.js    | 28 +++++++-
 ambari-web/app/messages.js                      |  1 +
 .../models/stack_version/repository_version.js  |  6 ++
 .../admin/stack_upgrade/upgrade_version_box.hbs |  2 +-
 .../stack_upgrade/upgrade_version_column.hbs    |  2 +-
 ambari-web/app/utils/ajax/ajax.js               |  5 ++
 .../stack_upgrade/upgrade_version_box_view.js   |  6 ++
 .../upgrade_version_column_view.js              |  6 +-
 .../admin/stack_and_upgrade_controller_test.js  | 74 +++++++++++++++++++-
 .../mappers/repository_version_mapper_test.js   | 46 ++++++++++++
 .../upgrade_version_box_view_test.js            | 23 ++++--
 13 files changed, 223 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/26949484/ambari-web/app/assets/test/tests.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/test/tests.js b/ambari-web/app/assets/test/tests.js
index 78c6196..92def58 100644
--- a/ambari-web/app/assets/test/tests.js
+++ b/ambari-web/app/assets/test/tests.js
@@ -148,6 +148,7 @@ var files = [
   'test/mappers/users_mapper_test',
   'test/mappers/stack_mapper_test',
   'test/mappers/stack_service_mapper_test',
+  'test/mappers/repository_version_mapper_test',
   'test/mappers/stack_upgrade_history_mapper_test',
   'test/mappers/configs/config_groups_mapper_test',
   'test/mappers/configs/service_config_version_mapper_test',

http://git-wip-us.apache.org/repos/asf/ambari/blob/26949484/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
index bbf7523..56a7a61 100644
--- a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
+++ b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
@@ -241,8 +241,8 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
    * @type {String}
    */
   realRepoUrl: function () {
-    return App.get('apiPrefix') + App.get('stackVersionURL') +
-      '/compatible_repository_versions?fields=*,operating_systems/*,operating_systems/repositories/*';
+    return App.get('apiPrefix') + '/stacks?fields=versions/repository_versions/RepositoryVersions,' +
+      'versions/repository_versions/operating_systems/*,versions/repository_versions/operating_systems/repositories/*';
   }.property('App.stackVersionURL'),
 
   /**
@@ -329,14 +329,16 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
     this.loadUpgradeData(true).done(function() {
       self.loadStackVersionsToModel(true).done(function () {
         self.loadRepoVersionsToModel().done(function() {
-          var currentVersion = App.StackVersion.find().findProperty('state', 'CURRENT');
-          if (currentVersion) {
-            self.set('currentVersion', {
-              repository_version: currentVersion.get('repositoryVersion.repositoryVersion'),
-              repository_name: currentVersion.get('repositoryVersion.displayName')
-            });
-          }
-          dfd.resolve();
+          self.loadCompatibleVersions().done(function() {
+            var currentVersion = App.StackVersion.find().findProperty('state', 'CURRENT');
+            if (currentVersion) {
+              self.set('currentVersion', {
+                repository_version: currentVersion.get('repositoryVersion.repositoryVersion'),
+                repository_name: currentVersion.get('repositoryVersion.displayName')
+              });
+            }
+            dfd.resolve();
+          });
         });
       });
     });
@@ -395,6 +397,29 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
     }
   },
 
+  loadCompatibleVersions: function() {
+    return App.ajax.send({
+      name: 'admin.upgrade.get_compatible_versions',
+      sender: this,
+      data: {
+        stackName: App.get('currentStackName'),
+        stackVersion: App.get('currentStackVersionNumber')
+      },
+      success: 'loadCompatibleVersionsSuccessCallback'
+    });
+  },
+
+  /**
+   * Marks each loaded repository version as compatible when it appears in the compatible-versions response.
+   * @param {object} data
+   */
+  loadCompatibleVersionsSuccessCallback: function(data) {
+    App.RepositoryVersion.find().forEach(function(repo) {
+      var version = repo.get('repositoryVersion');
+      repo.set('isCompatible', data.items.someProperty('CompatibleRepositoryVersions.repository_version', version));
+    });
+  },
+
   /**
    * update data of Upgrade
    * @param {object} newData

http://git-wip-us.apache.org/repos/asf/ambari/blob/26949484/ambari-web/app/mappers/repository_version_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/repository_version_mapper.js b/ambari-web/app/mappers/repository_version_mapper.js
index 19ff672..48e460b 100644
--- a/ambari-web/app/mappers/repository_version_mapper.js
+++ b/ambari-web/app/mappers/repository_version_mapper.js
@@ -24,7 +24,7 @@ App.repoVersionMapper = App.QuickDataMapper.create({
   modelServices: App.ServiceSimple,
 
   modelRepoVersion: function (isCurrentStackOnly) {
-    var repoVersionsKey = isCurrentStackOnly ? 'RepositoryVersions' : 'CompatibleRepositoryVersions';
+    var repoVersionsKey = 'RepositoryVersions';
     return {
       id: repoVersionsKey + '.id',
       stack_version_id: repoVersionsKey + '.stackVersionId',
@@ -92,7 +92,11 @@ App.repoVersionMapper = App.QuickDataMapper.create({
     var resultOS = [];
     var resultRepo = [];
     var resultService = [];
-    var repoVersionsKey = isCurrentStackOnly ? 'RepositoryVersions' : 'CompatibleRepositoryVersions';
+    var repoVersionsKey = 'RepositoryVersions';
+
+    if (!isCurrentStackOnly) {
+      json = this.convertToRepoScheme(json);
+    }
 
     if (json && json.items) {
       json.items.forEach(function (item) {
@@ -153,5 +157,25 @@ App.repoVersionMapper = App.QuickDataMapper.create({
     App.store.loadMany(modelOperatingSystems, resultOS);
     App.store.loadMany(modelServices, resultService);
     App.store.loadMany(modelRepoVersions, resultRepoVersion);
+  },
+
+  /**
+   * Flattens the nested /stacks response into the {items: [...]} shape expected by the mapper.
+   * @param {?object} json
+   * @returns {{items: Array}}
+   */
+  convertToRepoScheme: function(json) {
+    var extractedJson = {items: []};
+
+    if (json && json.items) {
+      json.items.forEach(function(stack) {
+        stack.versions.forEach(function(version) {
+          version.repository_versions.forEach(function(repoVersion) {
+            extractedJson.items.push(repoVersion);
+          }, this);
+        }, this);
+      }, this);
+    }
+    return extractedJson;
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/26949484/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 1c53839..5eafad2 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -1653,6 +1653,7 @@ Em.I18n.translations = {
 
   'admin.stackVersions.version.column.showDetails': "Show Details",
   'admin.stackVersions.version.column.showDetails.title': "Version Details",
+  'admin.stackVersions.version.noCompatible.tooltip': 'Directly upgrading to this version is not supported.',
 
   'admin.stackVersions.hosts.popup.header.current': "Current",
   'admin.stackVersions.hosts.popup.header.installed': "Installed",

http://git-wip-us.apache.org/repos/asf/ambari/blob/26949484/ambari-web/app/models/stack_version/repository_version.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/stack_version/repository_version.js b/ambari-web/app/models/stack_version/repository_version.js
index 759470d..d5bc64d 100644
--- a/ambari-web/app/models/stack_version/repository_version.js
+++ b/ambari-web/app/models/stack_version/repository_version.js
@@ -41,6 +41,12 @@ App.RepositoryVersion = DS.Model.extend({
   defaultStatus: 'INIT',
 
   /**
+   * @type {boolean}
+   * @default false
+   */
+  isCompatible: false,
+
+  /**
    * @type {string}
    */
   status: Em.computed.firstNotBlank('stackVersion.state', 'defaultStatus'),

http://git-wip-us.apache.org/repos/asf/ambari/blob/26949484/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_box.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_box.hbs b/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_box.hbs
index dcc6944..72bce5c 100644
--- a/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_box.hbs
+++ b/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_box.hbs
@@ -35,7 +35,7 @@
 </p>
 
 
-<div {{bindAttr class="view.stateElement.isInstalling:installing :align-center :state"}}>
+<div {{bindAttr class="view.stateElement.isInstalling:installing :align-center :state :repo-version-tooltip"}}>
   {{#if view.stateElement.isButton}}
     <button class="btn btn-primary"
       {{action runAction target="view"}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/26949484/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_column.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_column.hbs b/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_column.hbs
index 0878733..52a42fc 100644
--- a/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_column.hbs
+++ b/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_version_column.hbs
@@ -27,7 +27,7 @@
 <p class="version-column-show-details">
   <a {{action openVersionBoxPopup target="view"}}>{{t admin.stackVersions.version.column.showDetails}}</a>
 </p>
-<div {{bindAttr class="view.stateElement.isInstalling:installing :align-center :state"}}>
+<div {{bindAttr class="view.stateElement.isInstalling:installing :align-center :state :repo-version-tooltip"}}>
   {{#if view.stateElement.isButton}}
     <button class="btn btn-primary"
       {{action runAction target="view"}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/26949484/ambari-web/app/utils/ajax/ajax.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/ajax/ajax.js b/ambari-web/app/utils/ajax/ajax.js
index 7eb9c9e..a5eafef 100644
--- a/ambari-web/app/utils/ajax/ajax.js
+++ b/ambari-web/app/utils/ajax/ajax.js
@@ -1851,6 +1851,11 @@ var urls = {
     'mock': '/data/stack_versions/supported_upgrade_types.json'
   },
 
+  'admin.upgrade.get_compatible_versions': {
+    'real': '/stacks/{stackName}/versions/{stackVersion}/compatible_repository_versions?fields=CompatibleRepositoryVersions/repository_version&minimal_response=true',
+    'mock': '/data/stack_versions/supported_upgrade_types.json'
+  },
+
   'admin.kerberos_security.checks': {
     //TODO when api will be known
     'real': '',

http://git-wip-us.apache.org/repos/asf/ambari/blob/26949484/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
index 1e549cd..bf15f33 100644
--- a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
+++ b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_box_view.js
@@ -242,6 +242,7 @@ App.UpgradeVersionBoxView = Em.View.extend({
    */
   isDisabledOnInit: function() {
     return  this.get('controller.requestInProgress') ||
+            !this.get('content.isCompatible') ||
             (App.get('upgradeIsRunning') && !App.get('upgradeSuspended')) ||
             this.get('parentView.repoVersions').someProperty('status', 'INSTALLING');
   },
@@ -263,6 +264,11 @@ App.UpgradeVersionBoxView = Em.View.extend({
     App.tooltip($('.link-tooltip'), {title: Em.I18n.t('admin.stackVersions.version.linkTooltip')});
     App.tooltip($('.hosts-tooltip'));
     App.tooltip($('.out-of-sync-badge'), {title: Em.I18n.t('hosts.host.stackVersions.status.out_of_sync')});
+    if (!this.get('content.isCompatible')) {
+      App.tooltip(this.$(".repo-version-tooltip"), {
+        title: Em.I18n.t('admin.stackVersions.version.noCompatible.tooltip')
+      });
+    }
     Em.run.later(this, function () {
       if (this.get('state') !== 'inDOM') {
         return;

http://git-wip-us.apache.org/repos/asf/ambari/blob/26949484/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js
index 6290a5e..f6f86db 100644
--- a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js
+++ b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js
@@ -26,7 +26,11 @@ App.UpgradeVersionColumnView = App.UpgradeVersionBoxView.extend({
 
   didInsertElement: function () {
     App.tooltip($('.out-of-sync-badge'), {title: Em.I18n.t('hosts.host.stackVersions.status.out_of_sync')});
-
+    if (!this.get('content.isCompatible')) {
+      App.tooltip(this.$(".repo-version-tooltip"), {
+        title: Em.I18n.t('admin.stackVersions.version.noCompatible.tooltip')
+      });
+    }
     //set the width, height of each version colum dynamically
     var widthFactor = App.RepositoryVersion.find().get('length') > 3 ? 0.18: 0.31;
     $('.version-column').width($('.versions-slides').width() * widthFactor);

http://git-wip-us.apache.org/repos/asf/ambari/blob/26949484/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js b/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
index feaeafc..12d0da3 100644
--- a/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
+++ b/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
@@ -37,15 +37,16 @@ describe('App.MainAdminStackAndUpgradeController', function() {
   describe("#realRepoUrl", function() {
     before(function () {
       this.mock = sinon.stub(App, 'get');
-      this.mock.withArgs('apiPrefix').returns('apiPrefix')
-        .withArgs('stackVersionURL').returns('stackVersionURL');
+      this.mock.withArgs('apiPrefix').returns('apiPrefix');
     });
     after(function () {
       this.mock.restore();
     });
     it("should be valid", function() {
+      var expected = 'apiPrefix/stacks?fields=versions/repository_versions/RepositoryVersions,' +
+        'versions/repository_versions/operating_systems/*,versions/repository_versions/operating_systems/repositories/*';
       controller.propertyDidChange('realRepoUrl');
-      expect(controller.get('realRepoUrl')).to.equal('apiPrefixstackVersionURL/compatible_repository_versions?fields=*,operating_systems/*,operating_systems/repositories/*');
+      expect(controller.get('realRepoUrl')).to.equal(expected);
     });
   });
 
@@ -121,6 +122,9 @@ describe('App.MainAdminStackAndUpgradeController', function() {
       sinon.stub(controller, 'loadRepoVersionsToModel').returns({
         done: Em.clb
       });
+      sinon.stub(controller, 'loadCompatibleVersions').returns({
+        done: Em.clb
+      });
       sinon.stub(App.StackVersion, 'find').returns([Em.Object.create({
         state: 'CURRENT',
         repositoryVersion: {
@@ -134,6 +138,7 @@ describe('App.MainAdminStackAndUpgradeController', function() {
       controller.loadUpgradeData.restore();
       controller.loadStackVersionsToModel.restore();
       controller.loadRepoVersionsToModel.restore();
+      controller.loadCompatibleVersions.restore();
       App.StackVersion.find.restore();
     });
     it("loadUpgradeData called with valid arguments", function() {
@@ -145,6 +150,9 @@ describe('App.MainAdminStackAndUpgradeController', function() {
     it('loadRepoVersionsToModel called once', function () {
       expect(controller.loadRepoVersionsToModel.calledOnce).to.be.true;
     });
+    it('loadCompatibleVersions called once', function () {
+      expect(controller.loadCompatibleVersions.calledOnce).to.be.true;
+    });
     it('currentVersion is corrent', function () {
       expect(controller.get('currentVersion')).to.eql({
         "repository_version": "2.2",
@@ -3120,4 +3128,64 @@ describe('App.MainAdminStackAndUpgradeController', function() {
     });
   });
 
+  describe("#loadCompatibleVersions()", function () {
+
+    beforeEach(function() {
+      sinon.stub(App, 'get').returns('stack');
+    });
+
+    afterEach(function() {
+      App.get.restore();
+    });
+
+    it("App.ajax.send should be called", function() {
+      controller.loadCompatibleVersions();
+      var args = testHelpers.findAjaxRequest('name', 'admin.upgrade.get_compatible_versions');
+      expect(args[0]).to.be.eql({
+        name: 'admin.upgrade.get_compatible_versions',
+        sender: controller,
+        data: {
+          stackName: 'stack',
+          stackVersion: 'stack'
+        },
+        success: 'loadCompatibleVersionsSuccessCallback'
+      });
+    });
+  });
+
+  describe("#loadCompatibleVersionsSuccessCallback()", function () {
+    var mock = [
+      Em.Object.create({
+        repositoryVersion: 'HDP-1',
+        isCompatible: false
+      }),
+      Em.Object.create({
+        repositoryVersion: 'HDP-2',
+        isCompatible: false
+      })
+    ];
+
+    beforeEach(function() {
+      sinon.stub(App.RepositoryVersion, 'find').returns(mock);
+    });
+
+    afterEach(function() {
+      App.RepositoryVersion.find.restore();
+    });
+
+    it("should set isCompatible property", function() {
+      var data = {
+        items: [
+          {
+            CompatibleRepositoryVersions: {
+              repository_version: 'HDP-2'
+            }
+          }
+        ]
+      };
+      controller.loadCompatibleVersionsSuccessCallback(data);
+      expect(mock.mapProperty('isCompatible')).to.be.eql([false, true]);
+    });
+  });
+
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/26949484/ambari-web/test/mappers/repository_version_mapper_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/mappers/repository_version_mapper_test.js b/ambari-web/test/mappers/repository_version_mapper_test.js
new file mode 100644
index 0000000..63fbb43
--- /dev/null
+++ b/ambari-web/test/mappers/repository_version_mapper_test.js
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var App = require('app');
+
+require('utils/helper');
+require('mappers/repository_version_mapper');
+
+describe('App.repoVersionMapper', function () {
+
+  describe("#convertToRepoScheme()", function () {
+
+    it("json is null", function() {
+      expect(App.repoVersionMapper.convertToRepoScheme(null)).to.be.eql({items: []});
+    });
+
+    it("json is correct", function() {
+      var json = {
+        items: [{
+          versions: [{
+            repository_versions: [{
+              id: 1
+            }]
+          }]
+        }]
+      };
+      expect(App.repoVersionMapper.convertToRepoScheme(json)).to.be.eql({items: [{id: 1}]});
+    });
+  });
+
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/26949484/ambari-web/test/views/main/admin/stack_upgrade/upgrade_version_box_view_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/admin/stack_upgrade/upgrade_version_box_view_test.js b/ambari-web/test/views/main/admin/stack_upgrade/upgrade_version_box_view_test.js
index d4114ed..4506d3a 100644
--- a/ambari-web/test/views/main/admin/stack_upgrade/upgrade_version_box_view_test.js
+++ b/ambari-web/test/views/main/admin/stack_upgrade/upgrade_version_box_view_test.js
@@ -141,7 +141,7 @@ describe('App.UpgradeVersionBoxView', function () {
     });
     it("init tooltips", function () {
       view.didInsertElement();
-      expect(App.tooltip.callCount).to.equal(3);
+      expect(App.tooltip.callCount).to.equal(4);
     });
   });
 
@@ -959,6 +959,7 @@ describe('App.UpgradeVersionBoxView', function () {
         upgradeIsRunning: true,
         upgradeSuspended: true,
         status: 'INSTALLED',
+        isCompatible: true,
         expected: true
       },
       {
@@ -966,6 +967,7 @@ describe('App.UpgradeVersionBoxView', function () {
         upgradeIsRunning: true,
         upgradeSuspended: false,
         status: 'INSTALLED',
+        isCompatible: true,
         expected: true
       },
       {
@@ -973,6 +975,7 @@ describe('App.UpgradeVersionBoxView', function () {
         upgradeIsRunning: false,
         upgradeSuspended: false,
         status: 'INSTALLING',
+        isCompatible: true,
         expected: true
       },
       {
@@ -980,6 +983,15 @@ describe('App.UpgradeVersionBoxView', function () {
         upgradeIsRunning: true,
         upgradeSuspended: true,
         status: 'INSTALLED',
+        isCompatible: false,
+        expected: true
+      },
+      {
+        requestInProgress: false,
+        upgradeIsRunning: true,
+        upgradeSuspended: true,
+        status: 'INSTALLED',
+        isCompatible: true,
         expected: false
       },
       {
@@ -987,6 +999,7 @@ describe('App.UpgradeVersionBoxView', function () {
         upgradeIsRunning: false,
         upgradeSuspended: false,
         status: 'INSTALLED',
+        isCompatible: true,
         expected: false
       }
     ];
@@ -1001,15 +1014,17 @@ describe('App.UpgradeVersionBoxView', function () {
 
     testCases.forEach(function(test) {
       it("requestInProgress: " + test.requestInProgress +
-         "upgradeIsRunning: " + test.upgradeIsRunning +
-         "upgradeSuspended: " + test.upgradeSuspended +
-         "status" + test.status, function() {
+         " upgradeIsRunning: " + test.upgradeIsRunning +
+         " upgradeSuspended: " + test.upgradeSuspended +
+         " status" + test.status +
+         " isCompatible" + test.isCompatible, function() {
         this.mock.withArgs('upgradeSuspended').returns(test.upgradeSuspended);
         this.mock.withArgs('upgradeIsRunning').returns(test.upgradeIsRunning);
         view.set('parentView.repoVersions', [Em.Object.create({
           status: test.status
         })]);
         view.set('controller.requestInProgress', test.requestInProgress);
+        view.set('content.isCompatible', test.isCompatible);
         expect(view.isDisabledOnInit()).to.be.equal(test.expected);
       });
     });


[5/8] ambari git commit: AMBARI-18487 : Test and refine Collector writes w.r.t sharing and timeouts. (avijayan)

Posted by jo...@apache.org.
AMBARI-18487 : Test and refine Collector writes w.r.t sharing and timeouts. (avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c10053b0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c10053b0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c10053b0

Branch: refs/heads/branch-feature-AMBARI-18456
Commit: c10053b086ae1b4f41c3d4cdba18349144ef7ec4
Parents: 06f3b8e
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Tue Oct 4 11:10:52 2016 -0700
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Tue Oct 4 11:10:52 2016 -0700

----------------------------------------------------------------------
 .../timeline/AbstractTimelineMetricsSink.java   | 75 +++++++++++++-------
 .../availability/MetricCollectorHAHelper.java   |  9 +--
 .../src/main/python/core/blacklisted_set.py     | 14 ++++
 .../src/main/python/core/config_reader.py       |  3 +-
 .../src/main/python/core/emitter.py             | 74 +++++++++++++------
 5 files changed, 122 insertions(+), 53 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c10053b0/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
index fa5b694..efa5cba 100644
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
+++ b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
@@ -55,6 +55,8 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Random;
 import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -78,8 +80,9 @@ public abstract class AbstractTimelineMetricsSink {
 
   protected static final AtomicInteger failedCollectorConnectionsCounter = new AtomicInteger(0);
   public static int NUMBER_OF_SKIPPED_COLLECTOR_EXCEPTIONS = 100;
-  public int ZK_CONNECT_TRY_TIME = 10000;
+  public int ZK_CONNECT_TRY_COUNT = 10;
   public int ZK_SLEEP_BETWEEN_RETRY_TIME = 2000;
+  public boolean shardExpired = true;
 
   private SSLSocketFactory sslSocketFactory;
 
@@ -95,7 +98,7 @@ public abstract class AbstractTimelineMetricsSink {
   // well as timed refresh
   protected Supplier<String> targetCollectorHostSupplier;
 
-  protected final List<String> allKnownLiveCollectors = new ArrayList<>();
+  protected final SortedSet<String> allKnownLiveCollectors = new TreeSet<>();
 
   private volatile boolean isInitializedForHA = false;
 
@@ -127,7 +130,7 @@ public abstract class AbstractTimelineMetricsSink {
   protected void init() {
     metricSinkWriteShardStrategy = new MetricSinkWriteShardHostnameHashingStrategy(getHostname());
     collectorHAHelper = new MetricCollectorHAHelper(getZookeeperQuorum(),
-      ZK_CONNECT_TRY_TIME, ZK_SLEEP_BETWEEN_RETRY_TIME);
+      ZK_CONNECT_TRY_COUNT, ZK_SLEEP_BETWEEN_RETRY_TIME);
     isInitializedForHA = true;
   }
 
@@ -204,6 +207,8 @@ public abstract class AbstractTimelineMetricsSink {
       collectorHost = targetCollectorHostSupplier.get();
       // Last X attempts have failed - force refresh
       if (failedCollectorConnectionsCounter.get() > RETRY_COUNT_BEFORE_COLLECTOR_FAILOVER) {
+        LOG.info("Removing collector " + collectorHost + " from allKnownLiveCollectors.");
+        allKnownLiveCollectors.remove(collectorHost);
         targetCollectorHostSupplier = null;
         collectorHost = findPreferredCollectHost();
       }
@@ -321,6 +326,7 @@ public abstract class AbstractTimelineMetricsSink {
       init();
     }
 
+    shardExpired = false;
     // Auto expire and re-calculate after 1 hour
     if (targetCollectorHostSupplier != null) {
       String targetCollector = targetCollectorHostSupplier.get();
@@ -329,32 +335,12 @@ public abstract class AbstractTimelineMetricsSink {
       }
     }
 
-    Collection<String> collectorHosts = getConfiguredCollectorHosts();
-
-    LOG.debug("Trying to find live collector host from : " + collectorHosts);
     // Reach out to all configured collectors before Zookeeper
-    if (collectorHosts != null && !collectorHosts.isEmpty()) {
-      for (String hostStr : collectorHosts) {
-        hostStr = hostStr.trim();
-        if (!hostStr.isEmpty()) {
-          try {
-            Collection<String> liveHosts = findLiveCollectorHostsFromKnownCollector(hostStr, getCollectorPort());
-            // Update live Hosts - current host will already be a part of this
-            for (String host : liveHosts) {
-              allKnownLiveCollectors.add(host);
-            }
-            break; // Found at least 1 live collector
-          } catch (MetricCollectorUnavailableException e) {
-            LOG.info("Collector " + hostStr + " is not longer live. Removing " +
-              "it from list of know live collector hosts : " + allKnownLiveCollectors);
-            allKnownLiveCollectors.remove(hostStr);
-          }
-        }
-      }
-    }
+    refreshCollectorsFromConfigured();
 
     // Lookup Zookeeper for live hosts - max 10 seconds wait time
     if (allKnownLiveCollectors.size() == 0 && getZookeeperQuorum() != null) {
+      LOG.info("No live collectors from configuration. Requesting zookeeper...");
       allKnownLiveCollectors.addAll(collectorHAHelper.findLiveCollectorHostsFromZNode());
     }
 
@@ -363,6 +349,13 @@ public abstract class AbstractTimelineMetricsSink {
         new Supplier<String>() {
           @Override
           public String get() {
+            // The shardExpired flag determines whether Supplier.get() was invoked through the
+            // findPreferredCollectHost method (no need to refresh collector hosts),
+            // OR
+            // through expiry (refresh needed to pick up collectors that may have come back alive).
+            if (shardExpired) {
+              refreshCollectorsFromConfigured();
+            }
             return metricSinkWriteShardStrategy.findCollectorShard(new ArrayList<>(allKnownLiveCollectors));
           }
         },  // random.nextInt(max - min + 1) + min # (60 to 75 minutes)
@@ -372,12 +365,40 @@ public abstract class AbstractTimelineMetricsSink {
         TimeUnit.MINUTES
       );
 
-      return targetCollectorHostSupplier.get();
+      String collectorHost = targetCollectorHostSupplier.get();
+      shardExpired = true;
+      return collectorHost;
     }
     LOG.warn("Couldn't find any live collectors. Returning null");
+    shardExpired = true;
     return null;
   }
 
+  private void refreshCollectorsFromConfigured() {
+    Collection<String> collectorHosts = getConfiguredCollectorHosts();
+
+    LOG.debug("Trying to find live collector host from : " + collectorHosts);
+    if (collectorHosts != null && !collectorHosts.isEmpty()) {
+      for (String hostStr : collectorHosts) {
+        hostStr = hostStr.trim();
+        if (!hostStr.isEmpty()) {
+          try {
+            Collection<String> liveHosts = findLiveCollectorHostsFromKnownCollector(hostStr, getCollectorPort());
+            // Update live Hosts - current host will already be a part of this
+            for (String host : liveHosts) {
+              allKnownLiveCollectors.add(host);
+            }
+            break; // Found at least 1 live collector
+          } catch (MetricCollectorUnavailableException e) {
+            LOG.info("Collector " + hostStr + " is not longer live. Removing " +
+              "it from list of know live collector hosts : " + allKnownLiveCollectors);
+            allKnownLiveCollectors.remove(hostStr);
+          }
+        }
+      }
+    }
+  }
+
   Collection<String> findLiveCollectorHostsFromKnownCollector(String host, String port) throws MetricCollectorUnavailableException {
     List<String> collectors = new ArrayList<>();
     HttpURLConnection connection = null;
@@ -426,7 +447,7 @@ public abstract class AbstractTimelineMetricsSink {
       LOG.debug(errorMessage);
       LOG.debug(ioe);
       String warnMsg = "Unable to connect to collector to find live nodes.";
-      LOG.warn(warnMsg, ioe);
+      LOG.warn(warnMsg);
       throw new MetricCollectorUnavailableException(warnMsg);
     }
     return collectors;

http://git-wip-us.apache.org/repos/asf/ambari/blob/c10053b0/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHAHelper.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHAHelper.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHAHelper.java
index 4d0ec14..2254362 100644
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHAHelper.java
+++ b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHAHelper.java
@@ -22,6 +22,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.curator.CuratorZookeeperClient;
 import org.apache.curator.RetryLoop;
 import org.apache.curator.RetryPolicy;
+import org.apache.curator.retry.BoundedExponentialBackoffRetry;
 import org.apache.curator.retry.RetryUntilElapsed;
 import org.apache.zookeeper.ZooKeeper;
 
@@ -38,7 +39,7 @@ import java.util.concurrent.Callable;
  */
 public class MetricCollectorHAHelper {
   private final String zookeeperQuorum;
-  private final int tryTime;
+  private final int tryCount;
   private final int sleepMsBetweenRetries;
 
   private static final int CONNECTION_TIMEOUT = 2000;
@@ -50,9 +51,9 @@ public class MetricCollectorHAHelper {
 
   private static final Log LOG = LogFactory.getLog(MetricCollectorHAHelper.class);
 
-  public MetricCollectorHAHelper(String zookeeperQuorum, int tryTime, int sleepMsBetweenRetries) {
+  public MetricCollectorHAHelper(String zookeeperQuorum, int tryCount, int sleepMsBetweenRetries) {
     this.zookeeperQuorum = zookeeperQuorum;
-    this.tryTime = tryTime;
+    this.tryCount = tryCount;
     this.sleepMsBetweenRetries = sleepMsBetweenRetries;
   }
 
@@ -63,7 +64,7 @@ public class MetricCollectorHAHelper {
   public Collection<String> findLiveCollectorHostsFromZNode() {
     Set<String> collectors = new HashSet<>();
 
-    RetryPolicy retryPolicy = new RetryUntilElapsed(tryTime, sleepMsBetweenRetries);
+    RetryPolicy retryPolicy = new BoundedExponentialBackoffRetry(sleepMsBetweenRetries, 10*sleepMsBetweenRetries, tryCount);
     final CuratorZookeeperClient client = new CuratorZookeeperClient(zookeeperQuorum,
       SESSION_TIMEOUT, CONNECTION_TIMEOUT, null, retryPolicy);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c10053b0/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/blacklisted_set.py
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/blacklisted_set.py b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/blacklisted_set.py
index 3982c4e..dab54c0 100644
--- a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/blacklisted_set.py
+++ b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/blacklisted_set.py
@@ -41,6 +41,20 @@ class BlacklistedSet(set):
       if time.time() > self.__dict.get(item):
         yield item
 
+  def get_actual_size(self):
+    size = 0
+    for item in self.__iter__():
+      size += 1
+    return size
+
+  def get_item_at_index(self, index):
+    i = 0
+    for item in self.__iter__():
+      if i == index:
+        return item
+      i += 1
+    return None
+
   def blacklist(self, item):
     self.__dict[item] = time.time() + self.__blacklist_timeout
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c10053b0/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/config_reader.py
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/config_reader.py b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/config_reader.py
index 3ca3a31..890d3ce 100644
--- a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/config_reader.py
+++ b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/config_reader.py
@@ -101,6 +101,7 @@ AMBARI_AGENT_CONF = '/etc/ambari-agent/conf/ambari-agent.ini'
 config_content = """
 [default]
 debug_level = INFO
+hostname = localhost
 metrics_servers = ['localhost','host1','host2']
 enable_time_threshold = false
 enable_value_threshold = false
@@ -112,7 +113,7 @@ send_interval = 60
 collector_sleep_interval = 5
 max_queue_size = 5000
 failover_strategy = round-robin
-failover_strategy_blacklisted_interval_seconds = 0
+failover_strategy_blacklisted_interval_seconds = 60
 host = localhost
 port = 6188
 https_enabled = false

http://git-wip-us.apache.org/repos/asf/ambari/blob/c10053b0/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/emitter.py
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/emitter.py b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/emitter.py
index 050af16..ba3f18e 100644
--- a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/emitter.py
+++ b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/emitter.py
@@ -39,6 +39,8 @@ class Emitter(threading.Thread):
     logger.debug('Initializing Emitter thread.')
     self.lock = threading.Lock()
     self.send_interval = config.get_send_interval()
+    self.hostname = config.get_hostname_config()
+    self.hostname_hash = self.compute_hash(self.hostname)
     self._stop_handler = stop_handler
     self.application_metric_map = application_metric_map
     self.collector_port = config.get_server_port()
@@ -80,30 +82,42 @@ class Emitter(threading.Thread):
     self.push_metrics(json_data)
 
   def push_metrics(self, data):
+    success = False
+    while self.active_collector_hosts.get_actual_size() > 0:
+      collector_host = self.get_collector_host_shard()
+      success = self.try_with_collector_host(collector_host, data)
+      if success:
+        break
+    pass
+
+    if not success:
+      logger.info('No valid collectors found...')
+      for collector_host in self.active_collector_hosts:
+        success = self.try_with_collector_host(collector_host, data)
+      pass
+
+  def try_with_collector_host(self, collector_host, data):
     headers = {"Content-Type" : "application/json", "Accept" : "*/*"}
-    for collector_host in self.active_collector_hosts:
-      connection = self.get_connection(collector_host)
-      logger.info("server: %s" % collector_host)
-      logger.debug("message to sent: %s" % data)
-
-      retry_count = 0
-      while retry_count < self.MAX_RETRY_COUNT:
-        response = self.get_response_from_submission(connection, data, headers)
-        if response and response.status == 200:
-          return
-        else:
-          logger.warn("Retrying after {0} ...".format(self.RETRY_SLEEP_INTERVAL))
-          retry_count += 1
-          #Wait for the service stop event instead of sleeping blindly
-          if 0 == self._stop_handler.wait(self.RETRY_SLEEP_INTERVAL):
-            return
-        pass
-
-      if retry_count >= self.MAX_RETRY_COUNT:
-        self.active_collector_hosts.blacklist(collector_host)
-        logger.warn("Metric collector host {0} was blacklisted.".format(collector_host))
+    connection = self.get_connection(collector_host)
+    logger.debug("message to send: %s" % data)
+    retry_count = 0
+    while retry_count < self.MAX_RETRY_COUNT:
+      response = self.get_response_from_submission(connection, data, headers)
+      if response and response.status == 200:
+        return True
+      else:
+        logger.warn("Retrying after {0} ...".format(self.RETRY_SLEEP_INTERVAL))
+        retry_count += 1
+        #Wait for the service stop event instead of sleeping blindly
+        if 0 == self._stop_handler.wait(self.RETRY_SLEEP_INTERVAL):
+          return True
     pass
 
+    if retry_count >= self.MAX_RETRY_COUNT:
+      self.active_collector_hosts.blacklist(collector_host)
+      logger.warn("Metric collector host {0} was blacklisted.".format(collector_host))
+      return False
+
   def get_connection(self, collector_host):
     timeout = int(self.send_interval - 10)
     if self.is_server_https_enabled:
@@ -130,3 +144,21 @@ class Emitter(threading.Thread):
       logger.warn('Error sending metrics to server. %s' % str(e))
       return None
 
+  def get_collector_host_shard(self):
+    size = self.active_collector_hosts.get_actual_size()
+    index = self.hostname_hash % size
+    index = index if index >= 0 else index + size
+    hostname = self.active_collector_hosts.get_item_at_index(index)
+    logger.info('Calculated collector shard based on hostname : %s' % hostname)
+    return hostname
+
+  def compute_hash(self, hostname):
+    hash = 11987
+    length = len(hostname)
+    for i in xrange(0, length - 1):
+      hash = 31*hash + ord(hostname[i])
+    return hash
+
+
+
+


[4/8] ambari git commit: AMBARI-18517 : Changes in upgrade path for Kafka metrics collector hosts config. (avijayan)

Posted by jo...@apache.org.
AMBARI-18517 : Changes in upgrade path for Kafka metrics collector hosts config. (avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/06f3b8e9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/06f3b8e9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/06f3b8e9

Branch: refs/heads/branch-feature-AMBARI-18456
Commit: 06f3b8e9006ef6ad1533f33baa0c9544547bd244
Parents: 1bf2069
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Tue Oct 4 11:10:03 2016 -0700
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Tue Oct 4 11:10:20 2016 -0700

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog250.java       | 30 +++++++++-
 .../server/upgrade/UpgradeCatalog250Test.java   | 61 ++++++++++++++++++++
 2 files changed, 90 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/06f3b8e9/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
index 185bd58..091c6d9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
@@ -45,6 +45,9 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
 
   protected static final String HOST_VERSION_TABLE = "host_version";
   private static final String AMS_ENV = "ams-env";
+  private static final String KAFKA_BROKER = "kafka-broker";
+  private static final String KAFKA_TIMELINE_METRICS_HOST = "kafka.timeline.metrics.host";
+
   /**
    * Logger.
    */
@@ -110,6 +113,7 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
   protected void executeDMLUpdates() throws AmbariException, SQLException {
     updateAMSConfigs();
     createRoleAuthorizations();
+    updateKafkaConfigs();
   }
 
   protected void updateHostVersionTable() throws SQLException {
@@ -181,7 +185,31 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
         Arrays.asList("AMBARI.ADMINISTRATOR:AMBARI", "CLUSTER.ADMINISTRATOR:CLUSTER"));
 
     addRoleAuthorization("AMBARI.RUN_CUSTOM_COMMAND", "Perform custom administrative actions",
-        Collections.singletonList("AMBARI.ADMINISTRATOR:AMBARI"));
+      Collections.singletonList("AMBARI.ADMINISTRATOR:AMBARI"));
+  }
+
+  protected void updateKafkaConfigs() throws AmbariException {
+    AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
+    Clusters clusters = ambariManagementController.getClusters();
+
+    if (clusters != null) {
+      Map<String, Cluster> clusterMap = clusters.getClusters();
+
+      if (clusterMap != null && !clusterMap.isEmpty()) {
+        for (final Cluster cluster : clusterMap.values()) {
+
+          Config kafkaBrokerConfig = cluster.getDesiredConfigByType(KAFKA_BROKER);
+          if (kafkaBrokerConfig != null) {
+            Map<String, String> kafkaBrokerProperties = kafkaBrokerConfig.getProperties();
+
+            if (kafkaBrokerProperties != null && kafkaBrokerProperties.containsKey(KAFKA_TIMELINE_METRICS_HOST)) {
+              LOG.info("Removing kafka.timeline.metrics.host from kafka-broker");
+              removeConfigurationPropertiesFromCluster(cluster, KAFKA_BROKER, Collections.singleton("kafka.timeline.metrics.host"));
+            }
+          }
+        }
+      }
+    }
   }
 }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/06f3b8e9/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
index 7b6c3ad..9f34bcc 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
@@ -124,10 +124,12 @@ public class UpgradeCatalog250Test {
   public void testExecuteDMLUpdates() throws Exception {
     Method updateAmsConfigs = UpgradeCatalog250.class.getDeclaredMethod("updateAMSConfigs");
     Method createRoleAuthorizations = UpgradeCatalog250.class.getDeclaredMethod("createRoleAuthorizations");
+    Method updateKafkaConfigs = UpgradeCatalog250.class.getDeclaredMethod("updateKafkaConfigs");
 
     UpgradeCatalog250 upgradeCatalog250 = createMockBuilder(UpgradeCatalog250.class)
         .addMockedMethod(updateAmsConfigs)
         .addMockedMethod(createRoleAuthorizations)
+        .addMockedMethod(updateKafkaConfigs)
         .createMock();
 
     upgradeCatalog250.updateAMSConfigs();
@@ -136,6 +138,9 @@ public class UpgradeCatalog250Test {
     upgradeCatalog250.createRoleAuthorizations();
     expectLastCall().once();
 
+    upgradeCatalog250.updateKafkaConfigs();
+    expectLastCall().once();
+
     replay(upgradeCatalog250);
 
     upgradeCatalog250.executeDMLUpdates();
@@ -291,4 +296,60 @@ public class UpgradeCatalog250Test {
     Assert.assertEquals(1, clusterAdministratorAuthorizations.size());
     Assert.assertTrue(clusterAdministratorAuthorizations.contains(clusterRunCustomCommandEntity));
   }
+
+  @Test
+  public void testKafkaUpdateConfigs() throws Exception{
+
+    Map<String, String> oldProperties = new HashMap<String, String>() {
+      {
+        put("kafka.timeline.metrics.host", "{{metric_collector_host}}");
+        put("kafka.timeline.metrics.port", "{{metric_collector_port}}");
+      }
+    };
+    Map<String, String> newProperties = new HashMap<String, String>() {
+      {
+        put("kafka.timeline.metrics.port", "{{metric_collector_port}}");
+      }
+    };
+    EasyMockSupport easyMockSupport = new EasyMockSupport();
+
+    Clusters clusters = easyMockSupport.createNiceMock(Clusters.class);
+    final Cluster cluster = easyMockSupport.createNiceMock(Cluster.class);
+    Config mockKafkaBroker = easyMockSupport.createNiceMock(Config.class);
+
+    expect(clusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+      put("normal", cluster);
+    }}).once();
+    expect(cluster.getDesiredConfigByType("kafka-broker")).andReturn(mockKafkaBroker).atLeastOnce();
+    expect(mockKafkaBroker.getProperties()).andReturn(oldProperties).anyTimes();
+
+    Injector injector = easyMockSupport.createNiceMock(Injector.class);
+    expect(injector.getInstance(Gson.class)).andReturn(null).anyTimes();
+    expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(null).anyTimes();
+    expect(injector.getInstance(KerberosHelper.class)).andReturn(createNiceMock(KerberosHelper.class)).anyTimes();
+
+    replay(injector, clusters, mockKafkaBroker, cluster);
+
+    AmbariManagementControllerImpl controller = createMockBuilder(AmbariManagementControllerImpl.class)
+      .addMockedMethod("createConfiguration")
+      .addMockedMethod("getClusters", new Class[] { })
+      .addMockedMethod("createConfig")
+      .withConstructor(createNiceMock(ActionManager.class), clusters, injector)
+      .createNiceMock();
+
+    Injector injector2 = easyMockSupport.createNiceMock(Injector.class);
+    Capture<Map> propertiesCapture = EasyMock.newCapture();
+
+    expect(injector2.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
+    expect(controller.getClusters()).andReturn(clusters).anyTimes();
+    expect(controller.createConfig(anyObject(Cluster.class), anyString(), capture(propertiesCapture), anyString(),
+      anyObject(Map.class))).andReturn(createNiceMock(Config.class)).once();
+
+    replay(controller, injector2);
+    new UpgradeCatalog250(injector2).updateKafkaConfigs();
+    easyMockSupport.verifyAll();
+
+    Map<String, String> updatedProperties = propertiesCapture.getValue();
+    assertTrue(Maps.difference(newProperties, updatedProperties).areEqual());
+  }
 }


[3/8] ambari git commit: AMBARI-18463. Regression: krb5JAASLogin.conf is not updated during secure BP install (rlevas)

Posted by jo...@apache.org.
AMBARI-18463. Regression: krb5JAASLogin.conf is not updated during secure BP install (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1bf20690
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1bf20690
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1bf20690

Branch: refs/heads/branch-feature-AMBARI-18456
Commit: 1bf206907ea26eeeada640406ae2c130aa4140c9
Parents: 2694948
Author: Robert Levas <rl...@hortonworks.com>
Authored: Tue Oct 4 13:15:43 2016 -0400
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Tue Oct 4 13:16:08 2016 -0400

----------------------------------------------------------------------
 .../server/controller/KerberosHelperImpl.java   | 243 ++++++++++++++-----
 .../ConfigureAmbariIdentitiesServerAction.java  | 190 +++++++++++----
 .../kerberos/FinalizeKerberosServerAction.java  | 118 ++++++++-
 .../kerberos/KerberosOperationHandler.java      |   2 +-
 .../ambari/server/utils/ShellCommandUtil.java   | 121 +++++++++
 .../resources/stacks/HDP/2.0.6/kerberos.json    |  14 +-
 .../server/controller/KerberosHelperTest.java   | 195 ++++++++++++---
 ...nfigureAmbariIdentitiesServerActionTest.java | 204 ++++++++++++++++
 8 files changed, 936 insertions(+), 151 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1bf20690/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
index a3c6fd4..cd4a0b5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
@@ -54,7 +54,6 @@ import org.apache.ambari.server.metadata.RoleCommandOrder;
 import org.apache.ambari.server.orm.dao.ArtifactDAO;
 import org.apache.ambari.server.orm.dao.KerberosPrincipalDAO;
 import org.apache.ambari.server.orm.entities.ArtifactEntity;
-import org.apache.ambari.server.orm.entities.KerberosPrincipalEntity;
 import org.apache.ambari.server.security.credential.Credential;
 import org.apache.ambari.server.security.credential.PrincipalKeyCredential;
 import org.apache.ambari.server.security.encryption.CredentialStoreService;
@@ -133,7 +132,7 @@ public class KerberosHelperImpl implements KerberosHelper {
   /**
    * The set of states a component may be in, indicating that is have been previously installed on
    * the cluster.
-   *
+   * <p>
    * These values are important when trying to determine the state of the cluster when adding new components
    */
   private static final Set<State> PREVIOUSLY_INSTALLED_STATES = EnumSet.of(State.INSTALLED, State.STARTED, State.DISABLED);
@@ -711,7 +710,7 @@ public class KerberosHelperImpl implements KerberosHelper {
                 identityDescriptors = serviceDescriptor.getIdentities(true, filterContext);
                 if (identityDescriptors != null) {
                   for (KerberosIdentityDescriptor identityDescriptor : identityDescriptors) {
-                    createUserIdentity(identityDescriptor, kerberosConfiguration, kerberosOperationHandler, configurations);
+                    createIdentity(identityDescriptor, KerberosPrincipalType.USER, kerberosConfiguration, kerberosOperationHandler, configurations, null);
                   }
                 }
 
@@ -719,7 +718,7 @@ public class KerberosHelperImpl implements KerberosHelper {
                 identityDescriptors = componentDescriptor.getIdentities(true, filterContext);
                 if (identityDescriptors != null) {
                   for (KerberosIdentityDescriptor identityDescriptor : identityDescriptors) {
-                    createUserIdentity(identityDescriptor, kerberosConfiguration, kerberosOperationHandler, configurations);
+                    createIdentity(identityDescriptor, KerberosPrincipalType.USER, kerberosConfiguration, kerberosOperationHandler, configurations, null);
                   }
                 }
               }
@@ -730,16 +729,7 @@ public class KerberosHelperImpl implements KerberosHelper {
 
       // create Ambari principal & keytab, configure JAAS only if 'kerberos-env.create_ambari_principal = true'
       if (kerberosDetails.createAmbariPrincipal()) {
-        KerberosIdentityDescriptor ambariServerIdentity = kerberosDescriptor.getIdentity(KerberosHelper.AMBARI_IDENTITY_NAME);
-        if (ambariServerIdentity != null) {
-          createUserIdentity(ambariServerIdentity, kerberosConfiguration, kerberosOperationHandler, configurations);
-          installAmbariIdentity(ambariServerIdentity, configurations);
-          try {
-            KerberosChecker.checkJaasConfiguration();
-          } catch (AmbariException e) {
-            LOG.error("Error in Ambari JAAS configuration: ", e);
-          }
-        }
+        installAmbariIdentities(kerberosDescriptor, kerberosOperationHandler, kerberosConfiguration, configurations, kerberosDetails);
       }
 
       // The KerberosOperationHandler needs to be closed, if it fails to close ignore the
@@ -756,27 +746,129 @@ public class KerberosHelperImpl implements KerberosHelper {
   }
 
   /**
+   * Install identities needed by the Ambari server, itself.
+   * <p>
+   * The Ambari server needs its own identity for authentication; and, if Kerberos authentication is
+   * enabled, it needs a SPNEGO principal for ticket validation routines.
+   * <p>
+   * Any identities needed by the Ambari server need to be installed separately since an agent may not
+   * exist on the host and therefore distributing the keytab file(s) to the Ambari server host may
+   * not be possible using the same workflow used for other hosts in the cluster.
+   *
+   * @param kerberosDescriptor       the Kerberos descriptor
+   * @param kerberosOperationHandler the relevant KerberosOperationHandler
+   * @param kerberosEnvProperties    the kerberos-env properties
+   * @param configurations           a map of config-types to property name/value pairs representing
+   *                                 the existing configurations for the cluster
+   * @param kerberosDetails          a KerberosDetails containing information about relevant Kerberos
+   *                                 configuration
+   * @throws AmbariException
+   */
+  private void installAmbariIdentities(AbstractKerberosDescriptorContainer kerberosDescriptor,
+                                       KerberosOperationHandler kerberosOperationHandler,
+                                       Map<String, String> kerberosEnvProperties,
+                                       Map<String, Map<String, String>> configurations,
+                                       KerberosDetails kerberosDetails) throws AmbariException {
+
+    // Install Ambari's user/service principal...
+    String ambariServerHostname = StageUtils.getHostName();
+    KerberosIdentityDescriptor identity;
+
+    identity = kerberosDescriptor.getIdentity(KerberosHelper.AMBARI_IDENTITY_NAME);
+    if (identity != null) {
+      KerberosPrincipalDescriptor principal = identity.getPrincipalDescriptor();
+      if (principal != null) {
+        Keytab keytab = createIdentity(identity, principal.getType(), kerberosEnvProperties, kerberosOperationHandler, configurations, ambariServerHostname);
+        installAmbariIdentity(identity, keytab, configurations, ambariServerHostname, kerberosDetails, true);
+
+        try {
+          KerberosChecker.checkJaasConfiguration();
+        } catch (AmbariException e) {
+          LOG.error("Error in Ambari JAAS configuration: " + e.getLocalizedMessage(), e);
+        }
+      }
+    }
+
+    // Install Ambari's SPNEGO principal...
+    identity = kerberosDescriptor.getIdentity(KerberosHelper.SPNEGO_IDENTITY_NAME);
+    if (identity != null) {
+      KerberosPrincipalDescriptor principal = identity.getPrincipalDescriptor();
+
+      if (principal != null) {
+        Keytab keytab = createIdentity(identity, principal.getType(), kerberosEnvProperties, kerberosOperationHandler, configurations, ambariServerHostname);
+        installAmbariIdentity(identity, keytab, configurations, ambariServerHostname, kerberosDetails, false);
+      }
+    }
+  }
+
+  /**
    * Performs tasks needed to install the Kerberos identities created for the Ambari server.
    *
    * @param ambariServerIdentity the ambari server's {@link KerberosIdentityDescriptor}
-   * @param configurations       a map of compiled configrations used for variable replacment
+   * @param keytab               the Keyab data for the relevant identity
+   * @param configurations       a map of compiled configurations used for variable replacement
+   * @param hostname             the hostname to use to replace _HOST in principal names, if necessary
+   * @param kerberosDetails      a KerberosDetails containing information about relevant Kerberos configuration
+   * @param updateJAASFile       true to update Ambari's JAAS file; false otherwise
    * @throws AmbariException
-   * @see ConfigureAmbariIdentitiesServerAction#installAmbariServerIdentity(String, String, String, ActionLog)
+   * @see ConfigureAmbariIdentitiesServerAction#configureJAAS(String, String, ActionLog)
    */
   private void installAmbariIdentity(KerberosIdentityDescriptor ambariServerIdentity,
-                                     Map<String, Map<String, String>> configurations) throws AmbariException {
+                                     Keytab keytab, Map<String, Map<String, String>> configurations,
+                                     String hostname,
+                                     KerberosDetails kerberosDetails,
+                                     boolean updateJAASFile) throws AmbariException {
     KerberosPrincipalDescriptor principalDescriptor = ambariServerIdentity.getPrincipalDescriptor();
+
     if (principalDescriptor != null) {
       String principal = variableReplacementHelper.replaceVariables(principalDescriptor.getValue(), configurations);
-      KerberosPrincipalEntity ambariServerPrincipalEntity = kerberosPrincipalDAO.find(principal);
 
-      if(ambariServerPrincipalEntity != null) {
-        KerberosKeytabDescriptor keytabDescriptor = ambariServerIdentity.getKeytabDescriptor();
-        if(keytabDescriptor != null) {
-          String keytabFilePath = variableReplacementHelper.replaceVariables(keytabDescriptor.getFile(), configurations);
+      // Replace _HOST with the supplied hostname, if it exists
+      if (!StringUtils.isEmpty(hostname)) {
+        principal = principal.replace("_HOST", hostname);
+      }
 
-          injector.getInstance(ConfigureAmbariIdentitiesServerAction.class)
-              .installAmbariServerIdentity(principal, ambariServerPrincipalEntity.getCachedKeytabPath(), keytabFilePath, null);
+      KerberosKeytabDescriptor keytabDescriptor = ambariServerIdentity.getKeytabDescriptor();
+      if (keytabDescriptor != null) {
+        String destKeytabFilePath = variableReplacementHelper.replaceVariables(keytabDescriptor.getFile(), configurations);
+        File destKeytabFile = new File(destKeytabFilePath);
+
+        ConfigureAmbariIdentitiesServerAction configureAmbariIdentitiesServerAction = injector.getInstance(ConfigureAmbariIdentitiesServerAction.class);
+
+        if (keytab != null) {
+          try {
+            KerberosOperationHandler operationHandler = kerberosOperationHandlerFactory.getKerberosOperationHandler(kerberosDetails.getKdcType());
+            File tmpKeytabFile = createTemporaryFile();
+            try {
+              if ((operationHandler != null) && operationHandler.createKeytabFile(keytab, tmpKeytabFile)) {
+                String ownerName = variableReplacementHelper.replaceVariables(keytabDescriptor.getOwnerName(), configurations);
+                String ownerAccess = keytabDescriptor.getOwnerAccess();
+                boolean ownerWritable = "w".equalsIgnoreCase(ownerAccess) || "rw".equalsIgnoreCase(ownerAccess);
+                boolean ownerReadable = "r".equalsIgnoreCase(ownerAccess) || "rw".equalsIgnoreCase(ownerAccess);
+                String groupName = variableReplacementHelper.replaceVariables(keytabDescriptor.getGroupName(), configurations);
+                String groupAccess = keytabDescriptor.getGroupAccess();
+                boolean groupWritable = "w".equalsIgnoreCase(groupAccess) || "rw".equalsIgnoreCase(groupAccess);
+                boolean groupReadable = "r".equalsIgnoreCase(groupAccess) || "rw".equalsIgnoreCase(groupAccess);
+
+                configureAmbariIdentitiesServerAction.installAmbariServerIdentity(principal, tmpKeytabFile.getAbsolutePath(), destKeytabFilePath,
+                    ownerName, ownerReadable, ownerWritable, groupName, groupReadable, groupWritable, null);
+                LOG.debug("Successfully created keytab file for {} at {}", principal, destKeytabFile.getAbsolutePath());
+              } else {
+                LOG.error("Failed to create keytab file for {} at {}", principal, destKeytabFile.getAbsolutePath());
+              }
+            } finally {
+              tmpKeytabFile.delete();
+            }
+          } catch (KerberosOperationException e) {
+            throw new AmbariException(String.format("Failed to create keytab file for %s at %s: %s:",
+                principal, destKeytabFile.getAbsolutePath(), e.getLocalizedMessage()), e);
+          }
+        } else {
+          LOG.error("No keytab data is available to create the keytab file for {} at {}", principal, destKeytabFile.getAbsolutePath());
+        }
+
+        if (updateJAASFile) {
+          configureAmbariIdentitiesServerAction.configureJAAS(principal, destKeytabFile.getAbsolutePath(), null);
         }
       }
     }
@@ -1388,33 +1480,40 @@ public class KerberosHelperImpl implements KerberosHelper {
 
   /**
    * Creates the principal and cached keytab file for the specified identity, if it is determined to
-   * be user (or headless) identity
+   * be of the expected type - user (headless) or service.
    * <p/>
-   * If the identity is determined not to be a user identity, it is skipped.
+   * If the identity is not of the expected type, it will be skipped.
    *
    * @param identityDescriptor       the Kerberos identity to process
+   * @param expectedType             the expected principal type
    * @param kerberosEnvProperties    the kerberos-env properties
    * @param kerberosOperationHandler the relevant KerberosOperationHandler
    * @param configurations           the existing configurations for the cluster
-   * @return true if the identity was created; otherwise false
+   * @param hostname                 the hostname of the host to create the identity for (nullable)
+   * @return the relevant keytab data, if successful; otherwise null
    * @throws AmbariException
    */
-  private boolean createUserIdentity(KerberosIdentityDescriptor identityDescriptor,
-                                     Map<String, String> kerberosEnvProperties,
-                                     KerberosOperationHandler kerberosOperationHandler,
-                                     Map<String, Map<String, String>> configurations)
+  private Keytab createIdentity(KerberosIdentityDescriptor identityDescriptor,
+                                KerberosPrincipalType expectedType, Map<String, String> kerberosEnvProperties,
+                                KerberosOperationHandler kerberosOperationHandler,
+                                Map<String, Map<String, String>> configurations, String hostname)
       throws AmbariException {
 
-    boolean created = false;
+    Keytab keytab = null;
 
     if (identityDescriptor != null) {
       KerberosPrincipalDescriptor principalDescriptor = identityDescriptor.getPrincipalDescriptor();
 
       if (principalDescriptor != null) {
-        // If this principal indicates it is a user principal, continue, else skip it.
-        if (KerberosPrincipalType.USER == principalDescriptor.getType()) {
+        // If this principal type is expected, continue, else skip it.
+        if (expectedType == principalDescriptor.getType()) {
           String principal = variableReplacementHelper.replaceVariables(principalDescriptor.getValue(), configurations);
 
+          // Replace _HOST with the supplied hostname, if one was provided
+          if (!StringUtils.isEmpty(hostname)) {
+            principal = principal.replace("_HOST", hostname);
+          }
+
           // If this principal is already in the Ambari database, then don't try to recreate it or it's
           // keytab file.
           if (!kerberosPrincipalDAO.exists(principal)) {
@@ -1422,7 +1521,7 @@ public class KerberosHelperImpl implements KerberosHelper {
 
             result = injector.getInstance(CreatePrincipalsServerAction.class).createPrincipal(
                 principal,
-                false,
+                KerberosPrincipalType.SERVICE.equals(expectedType),
                 kerberosEnvProperties,
                 kerberosOperationHandler,
                 false,
@@ -1434,7 +1533,7 @@ public class KerberosHelperImpl implements KerberosHelper {
               KerberosKeytabDescriptor keytabDescriptor = identityDescriptor.getKeytabDescriptor();
 
               if (keytabDescriptor != null) {
-                Keytab keytab = injector.getInstance(CreateKeytabFilesServerAction.class).createKeytab(
+                keytab = injector.getInstance(CreateKeytabFilesServerAction.class).createKeytab(
                     principal,
                     result.getPassword(),
                     result.getKeyNumber(),
@@ -1447,15 +1546,13 @@ public class KerberosHelperImpl implements KerberosHelper {
                   throw new AmbariException("Failed to create the keytab for " + principal);
                 }
               }
-
-              created = true;
             }
           }
         }
       }
     }
 
-    return created;
+    return keytab;
   }
 
   /**
@@ -1657,7 +1754,7 @@ public class KerberosHelperImpl implements KerberosHelper {
 
     // Add the finalize stage...
     handler.addFinalizeOperationStage(cluster, clusterHostInfoJson, hostParamsJson, event,
-        dataDirectory, roleCommandOrder, requestStageContainer);
+        dataDirectory, roleCommandOrder, requestStageContainer, kerberosDetails);
 
     // If all goes well, set the appropriate states on the relevant ServiceComponentHosts
     for (ServiceComponentHost sch : schToProcess) {
@@ -1859,7 +1956,7 @@ public class KerberosHelperImpl implements KerberosHelper {
 
 
       handler.addFinalizeOperationStage(cluster, clusterHostInfoJson, hostParamsJson, event,
-          dataDirectory, roleCommandOrder, requestStageContainer);
+          dataDirectory, roleCommandOrder, requestStageContainer, kerberosDetails);
     }
 
     return requestStageContainer;
@@ -1954,24 +2051,16 @@ public class KerberosHelperImpl implements KerberosHelper {
    * @return a File pointing to the new temporary directory, or null if one was not created
    * @throws AmbariException if a new temporary directory cannot be created
    */
-  private File createTemporaryDirectory() throws AmbariException {
-    String tempDirectoryPath = configuration.getProperty(Configuration.SERVER_TMP_DIR.getKey());
-
-    if ((tempDirectoryPath == null) || tempDirectoryPath.isEmpty()) {
-      tempDirectoryPath = System.getProperty("java.io.tmpdir");
-    }
-
+  protected File createTemporaryDirectory() throws AmbariException {
     try {
-      if (tempDirectoryPath == null) {
-        throw new IOException("The System property 'java.io.tmpdir' does not specify a temporary directory");
-      }
+      File temporaryDirectory = getConfiguredTemporaryDirectory();
 
       File directory;
       int tries = 0;
       long now = System.currentTimeMillis();
 
       do {
-        directory = new File(tempDirectoryPath, String.format("%s%d-%d.d",
+        directory = new File(temporaryDirectory, String.format("%s%d-%d.d",
             KerberosServerAction.DATA_DIRECTORY_PREFIX, now, tries));
 
         if ((directory.exists()) || !directory.mkdirs()) {
@@ -1982,7 +2071,7 @@ public class KerberosHelperImpl implements KerberosHelper {
       } while ((directory == null) && (++tries < 100));
 
       if (directory == null) {
-        throw new IOException(String.format("Failed to create a temporary directory in %s", tempDirectoryPath));
+        throw new IOException(String.format("Failed to create a temporary directory in %s", temporaryDirectory));
       }
 
       return directory;
@@ -2085,6 +2174,43 @@ public class KerberosHelperImpl implements KerberosHelper {
     }
   }
 
+  /**
+   * Creates a temporary file within the system temporary directory
+   * <p/>
+   * The resulting file is to be removed by the caller when desired.
+   *
+   * @return a File pointing to the new temporary file, or null if one was not created
+   * @throws AmbariException if a new temporary directory cannot be created
+   */
+  protected File createTemporaryFile() throws AmbariException {
+    try {
+      return File.createTempFile("tmp", ".tmp", getConfiguredTemporaryDirectory());
+    } catch (IOException e) {
+      String message = "Failed to create a temporary file.";
+      LOG.error(message, e);
+      throw new AmbariException(message, e);
+    }
+  }
+
+  /**
+   * Gets the configured temporary directory.
+   *
+   * @return a File pointing to the configured temporary directory
+   * @throws IOException
+   */
+  protected File getConfiguredTemporaryDirectory() throws IOException {
+    String tempDirectoryPath = configuration.getServerTempDir();
+
+    if (StringUtils.isEmpty(tempDirectoryPath)) {
+      tempDirectoryPath = System.getProperty("java.io.tmpdir");
+    }
+
+    if (tempDirectoryPath == null) {
+      throw new IOException("The System property 'java.io.tmpdir' does not specify a temporary directory");
+    }
+
+    return new File(tempDirectoryPath);
+  }
 
   /**
    * Creates a new stage
@@ -2277,9 +2403,10 @@ public class KerberosHelperImpl implements KerberosHelper {
    *                           services
    * @param componentName      the name of a component for which to find results, null indicates all
    *                           components
-   * @param kerberosDescriptor the relevant Kerberos Descriptor     @return a list of KerberosIdentityDescriptors representing the active identities for the
-   * requested service component
+   * @param kerberosDescriptor the relevant Kerberos Descriptor
    * @param filterContext      the context to use for filtering identities based on the state of the cluster
+   * @return a list of KerberosIdentityDescriptors representing the active identities for the
+   *         requested service component
    * @throws AmbariException if an error occurs processing the cluster's active identities
    */
   private List<KerberosIdentityDescriptor> getActiveIdentities(Cluster cluster,
@@ -2973,12 +3100,14 @@ public class KerberosHelperImpl implements KerberosHelper {
 
     public void addFinalizeOperationStage(Cluster cluster, String clusterHostInfoJson,
                                           String hostParamsJson, ServiceComponentHostServerActionEvent event,
-                                          File dataDirectory,
-                                          RoleCommandOrder roleCommandOrder, RequestStageContainer requestStageContainer)
+                                          File dataDirectory, RoleCommandOrder roleCommandOrder,
+                                          RequestStageContainer requestStageContainer,
+                                          KerberosDetails kerberosDetails)
         throws AmbariException {
 
       // Add the finalize stage...
       Map<String, String> commandParameters = new HashMap<String, String>();
+      commandParameters.put(KerberosServerAction.DEFAULT_REALM, kerberosDetails.getDefaultRealm());
       commandParameters.put(KerberosServerAction.AUTHENTICATED_USER_NAME, ambariManagementController.getAuthName());
       if (dataDirectory != null) {
         commandParameters.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath());

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bf20690/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerAction.java
index 10647e8..80acd0d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerAction.java
@@ -126,7 +126,16 @@ public class ConfigureAmbariIdentitiesServerAction extends KerberosServerAction
           File srcKeytabFile = new File(hostDirectory, DigestUtils.sha1Hex(destKeytabFilePath));
 
           if (srcKeytabFile.exists()) {
-            installAmbariServerIdentity(evaluatedPrincipal, srcKeytabFile.getAbsolutePath(), destKeytabFilePath, actionLog);
+            String ownerAccess = identityRecord.get(KerberosIdentityDataFileReader.KEYTAB_FILE_OWNER_ACCESS);
+            boolean ownerWritable = "w".equalsIgnoreCase(ownerAccess) || "rw".equalsIgnoreCase(ownerAccess);
+            boolean ownerReadable = "r".equalsIgnoreCase(ownerAccess) || "rw".equalsIgnoreCase(ownerAccess);
+            String groupAccess = identityRecord.get(KerberosIdentityDataFileReader.KEYTAB_FILE_GROUP_ACCESS);
+            boolean groupWritable = "w".equalsIgnoreCase(groupAccess) || "rw".equalsIgnoreCase(groupAccess);
+            boolean groupReadable = "r".equalsIgnoreCase(groupAccess) || "rw".equalsIgnoreCase(groupAccess);
+
+            installAmbariServerIdentity(evaluatedPrincipal, srcKeytabFile.getAbsolutePath(), destKeytabFilePath,
+                identityRecord.get(KerberosIdentityDataFileReader.KEYTAB_FILE_OWNER_NAME), ownerReadable, ownerWritable,
+                identityRecord.get(KerberosIdentityDataFileReader.KEYTAB_FILE_GROUP_NAME), groupReadable, groupWritable, actionLog);
 
             if ("AMBARI_SERVER".equals(identityRecord.get(KerberosIdentityDataFileReader.COMPONENT))) {
               // Create/update the JAASFile...
@@ -147,6 +156,12 @@ public class ConfigureAmbariIdentitiesServerAction extends KerberosServerAction
    * @param principal          the ambari server principal name
    * @param srcKeytabFilePath  the source location of the ambari server keytab file
    * @param destKeytabFilePath the destination location of the ambari server keytab file
+   * @param ownerName          the username for the owner of the generated keytab file
+   * @param ownerReadable      true if the owner should be able to read this file; otherwise false
+   * @param ownerWritable      true if the owner should be able to write to this file; otherwise false
+   * @param groupName          the name of the group for the generated keytab file
+   * @param groupReadable      true if the group should be able to read this file; otherwise false
+   * @param groupWritable      true if the group should be able to write to this file; otherwise false
    * @param actionLog          the logger
    * @return true if success; false otherwise
    * @throws AmbariException
@@ -154,46 +169,38 @@ public class ConfigureAmbariIdentitiesServerAction extends KerberosServerAction
   public boolean installAmbariServerIdentity(String principal,
                                              String srcKeytabFilePath,
                                              String destKeytabFilePath,
+                                             String ownerName, boolean ownerReadable, boolean ownerWritable,
+                                             String groupName, boolean groupReadable, boolean groupWritable,
                                              ActionLog actionLog) throws AmbariException {
 
-    // Use sudo to copy the file into place....
     try {
-      ShellCommandUtil.Result result;
+      // Copy the keytab file into place (creating the parent directory, if necessary)...
+      copyFile(srcKeytabFilePath, destKeytabFilePath);
+      setFileACL(destKeytabFilePath,
+          ownerName, ownerReadable, ownerWritable,
+          groupName, groupReadable, groupWritable);
 
-      // Ensure the parent directory exists...
-      File destKeytabFile = new File(destKeytabFilePath);
-      result = ShellCommandUtil.mkdir(destKeytabFile.getParent(), true);
-      if (!result.isSuccessful()) {
-        throw new AmbariException(result.getStderr());
-      }
+      String ambariServerHostName = StageUtils.getHostName();
+      HostEntity ambariServerHostEntity = hostDAO.findByName(ambariServerHostName);
+      Long ambariServerHostID = (ambariServerHostEntity == null)
+          ? null
+          : ambariServerHostEntity.getHostId();
 
-      // Copy the keytab file into place...
-      result = ShellCommandUtil.copyFile(srcKeytabFilePath, destKeytabFilePath, true, true);
-      if (!result.isSuccessful()) {
-        throw new AmbariException(result.getStderr());
-      } else {
-        String ambariServerHostName = StageUtils.getHostName();
-        HostEntity ambariServerHostEntity = hostDAO.findByName(ambariServerHostName);
-        Long ambariServerHostID = (ambariServerHostEntity == null)
-            ? null
-            : ambariServerHostEntity.getHostId();
-
-        if (ambariServerHostID == null) {
-          String message = String.format("Failed to add the kerberos_principal_host record for %s on " +
-                  "the Ambari server host since the host id for Ambari server host, %s, was not found." +
-                  "  This is not an error if an Ambari agent is not installed on the Ambari server host.",
-              principal, ambariServerHostName);
-          LOG.warn(message);
-          if(actionLog != null) {
-            actionLog.writeStdErr(message);
-          }
-        } else if (!kerberosPrincipalHostDAO.exists(principal, ambariServerHostID)) {
-          kerberosPrincipalHostDAO.create(principal, ambariServerHostID);
+      if (ambariServerHostID == null) {
+        String message = String.format("Failed to add the kerberos_principal_host record for %s on " +
+                "the Ambari server host since the host id for Ambari server host, %s, was not found." +
+                "  This is not an error if an Ambari agent is not installed on the Ambari server host.",
+            principal, ambariServerHostName);
+        LOG.warn(message);
+        if (actionLog != null) {
+          actionLog.writeStdErr(message);
         }
+      } else if (!kerberosPrincipalHostDAO.exists(principal, ambariServerHostID)) {
+        kerberosPrincipalHostDAO.create(principal, ambariServerHostID);
+      }
 
-        if(actionLog != null) {
-          actionLog.writeStdOut(String.format("Created Ambari server keytab file for %s at %s", principal, destKeytabFile));
-        }
+      if (actionLog != null) {
+        actionLog.writeStdOut(String.format("Created Ambari server keytab file for %s at %s", principal, destKeytabFilePath));
       }
     } catch (InterruptedException | IOException e) {
       throw new AmbariException(e.getLocalizedMessage(), e);
@@ -202,8 +209,16 @@ public class ConfigureAmbariIdentitiesServerAction extends KerberosServerAction
     return true;
   }
 
-  private void configureJAAS(String evaluatedPrincipal, String keytabFilePath, ActionLog actionLog) {
-    String jaasConfPath = System.getProperty(KerberosChecker.JAVA_SECURITY_AUTH_LOGIN_CONFIG);
+  /**
+   * Configure Ambari's JAAS file to reflect the principal name and keytab file for Ambari's Kerberos
+   * identity.
+   *
+   * @param principal      the Ambari server's principal name
+   * @param keytabFilePath the absolute path to the Ambari server's keytab file
+   * @param actionLog      the logger
+   */
+  public void configureJAAS(String principal, String keytabFilePath, ActionLog actionLog) {
+    String jaasConfPath = getJAASConfFilePath();
     if (jaasConfPath != null) {
       File jaasConfigFile = new File(jaasConfPath);
       try {
@@ -211,16 +226,16 @@ public class ConfigureAmbariIdentitiesServerAction extends KerberosServerAction
         File oldJaasConfigFile = new File(jaasConfPath + ".bak");
         FileUtils.writeStringToFile(oldJaasConfigFile, jaasConfig);
         jaasConfig = jaasConfig.replaceFirst(KEYTAB_PATTERN, "keyTab=\"" + keytabFilePath + "\"");
-        jaasConfig = jaasConfig.replaceFirst(PRINCIPAL_PATTERN, "principal=\"" + evaluatedPrincipal + "\"");
+        jaasConfig = jaasConfig.replaceFirst(PRINCIPAL_PATTERN, "principal=\"" + principal + "\"");
         FileUtils.writeStringToFile(jaasConfigFile, jaasConfig);
-        String message = String.format("JAAS config file %s modified successfully for principal %s.", jaasConfigFile
-            .getName(), evaluatedPrincipal);
+        String message = String.format("JAAS config file %s modified successfully for principal %s.",
+            jaasConfigFile.getName(), principal);
         if (actionLog != null) {
           actionLog.writeStdOut(message);
         }
       } catch (IOException e) {
-        String message = String.format("Failed to configure JAAS file %s for %s - %s", jaasConfigFile,
-            evaluatedPrincipal, e.getMessage());
+        String message = String.format("Failed to configure JAAS file %s for %s - %s",
+            jaasConfigFile, principal, e.getMessage());
         if (actionLog != null) {
           actionLog.writeStdErr(message);
         }
@@ -236,4 +251,97 @@ public class ConfigureAmbariIdentitiesServerAction extends KerberosServerAction
     }
   }
 
+  /**
+   * Copies the specified source file to the specified destination path, creating any needed parent
+   * directories.
+   * <p>
+   * This method is mocked in unit tests to avoid dealing with ShellCommandUtil in a mocked env.
+   *
+   * @param srcKeytabFilePath  the source location of the ambari server keytab file
+   * @param destKeytabFilePath the destination location of the ambari server keytab file
+   * @throws IOException
+   * @throws InterruptedException
+   * @throws AmbariException
+   * @see ShellCommandUtil#mkdir(String, boolean)
+   * @see ShellCommandUtil#copyFile(String, String, boolean, boolean)
+   */
+  void copyFile(String srcKeytabFilePath, String destKeytabFilePath)
+      throws IOException, InterruptedException {
+
+    ShellCommandUtil.Result result;
+
+    // Create the parent directory if necessary (using sudo)
+    File destKeytabFile = new File(destKeytabFilePath);
+    result = ShellCommandUtil.mkdir(destKeytabFile.getParent(), true);
+    if (!result.isSuccessful()) {
+      throw new AmbariException(result.getStderr());
+    }
+
+    // Copy the file (using sudo)
+    result = ShellCommandUtil.copyFile(srcKeytabFilePath, destKeytabFilePath, true, true);
+    if (!result.isSuccessful()) {
+      throw new AmbariException(result.getStderr());
+    }
+  }
+
+  /**
+   * Sets the access control list for this specified file.
+   * <p>
+   * The owner and group for the file is set as well as the owner's and group's ability to read and write
+   * the file.
+   * <p>
+   * The result of the operation to set the group for the file is ignored since it is possible that
+   * the group does not exist when performing this operation. It is expected this issue will be remedied
+   * when the group becomes available.
+   * <p>
+   * Access for other users is denied and the file is assumed to not be executable by anyone.
+   *
+   * @param filePath      the path to the file
+   * @param ownerName     the username for the owner of the generated keytab file
+   * @param ownerReadable true if the owner should be able to read this file; otherwise false
+   * @param ownerWritable true if the owner should be able to write to this file; otherwise false
+   * @param groupName     the name of the group for the generated keytab file
+   * @param groupReadable true if the group should be able to read this file; otherwise false
+   * @param groupWritable true if the group should be able to write to this file; otherwise false
+   * @throws AmbariException if an error occurs setting the permissions on the file
+   */
+  void setFileACL(String filePath,
+                          String ownerName, boolean ownerReadable, boolean ownerWritable,
+                          String groupName, boolean groupReadable, boolean groupWritable)
+      throws AmbariException {
+
+    ShellCommandUtil.Result result;
+
+    result = ShellCommandUtil.setFileOwner(filePath, ownerName);
+
+    if (result.isSuccessful()) {
+      result = ShellCommandUtil.setFileGroup(filePath, groupName);
+
+      if (!result.isSuccessful()) {
+        // Ignore, but log, this failure; it is possible that the group does not exist when performing this operation
+        LOG.warn("Failed to set the group for the file at {} to {}: {}", filePath, groupName, result.getStderr());
+      }
+
+      result = ShellCommandUtil.setFileMode(filePath,
+          ownerReadable, ownerWritable, false,
+          groupReadable, groupWritable, false,
+          false, false, false);
+    }
+
+    if (!result.isSuccessful()) {
+      throw new AmbariException(result.getStderr());
+    }
+  }
+
+  /**
+   * Gets the location of Ambari's JAAS config file.
+   * <p>
+   * This method is mocked in unit tests to avoid having to alter the System properties in
+   * order to locate the test JAAS config file.
+   *
+   * @return the path to Ambari's JAAS config file
+   */
+  String getJAASConfFilePath() {
+    return System.getProperty(KerberosChecker.JAVA_SECURITY_AUTH_LOGIN_CONFIG);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bf20690/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerAction.java
index 7205d09..d485eb3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/FinalizeKerberosServerAction.java
@@ -26,14 +26,19 @@ import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.SecurityState;
 import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.ambari.server.utils.ShellCommandUtil;
+import org.apache.ambari.server.utils.StageUtils;
 import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.ConcurrentMap;
 
 public class FinalizeKerberosServerAction extends KerberosServerAction {
@@ -42,7 +47,10 @@ public class FinalizeKerberosServerAction extends KerberosServerAction {
   /**
    * Processes an identity as necessary.
    * <p/>
-   * This method is not used since the {@link #processIdentities(java.util.Map)} is not invoked
+   * This implementation ensures that keytab files for the Ambari identities have the correct
+   * permissions.  This is important in the event a secure cluster was created via Blueprints since
+   * some user accounts and groups may not have been available (at the OS level) when the keytab files
+   * were created.
    *
    * @param identityRecord           a Map containing the data for the current identity record
    * @param evaluatedPrincipal       a String indicating the relevant principal
@@ -61,6 +69,91 @@ public class FinalizeKerberosServerAction extends KerberosServerAction {
                                           Map<String, Object> requestSharedDataContext)
       throws AmbariException {
 
+    if (identityRecord != null) {
+      // If the record's HOSTNAME value is "ambari-server", rather than an actual hostname it will
+      // not match the Ambari server's host name. This will occur if there is no agent installed
+      // on the Ambari server host.  This is ok, since any keytab files installed on the Ambari server
+      // host will already have the permissions set so that only the Ambari server can read it.
+      // There is no need to update the permissions for those keytab files so that installed services
+      // can access them since no services will be installed on the host.
+      if (StageUtils.getHostName().equals(identityRecord.get(KerberosIdentityDataFile.HOSTNAME))) {
+
+        // If the principal name exists in one of the shared data maps, it has been processed by the
+        // current "Enable Kerberos" or "Add component" workflow and therefore should already have
+        // the correct permissions assigned. The relevant keytab files can be skipped.
+        Map<String, String> principalPasswordMap = getPrincipalPasswordMap(requestSharedDataContext);
+        if ((principalPasswordMap == null) || !principalPasswordMap.containsKey(evaluatedPrincipal)) {
+
+          String keytabFilePath = identityRecord.get(KerberosIdentityDataFile.KEYTAB_FILE_PATH);
+
+          if (!StringUtils.isEmpty(keytabFilePath)) {
+            Set<String> visited = (Set<String>) requestSharedDataContext.get(this.getClass().getName() + "_visited");
+
+            if (!visited.contains(keytabFilePath)) {
+              String ownerName = identityRecord.get(KerberosIdentityDataFile.KEYTAB_FILE_OWNER_NAME);
+              String ownerAccess = identityRecord.get(KerberosIdentityDataFileReader.KEYTAB_FILE_OWNER_ACCESS);
+              boolean ownerWritable = "w".equalsIgnoreCase(ownerAccess) || "rw".equalsIgnoreCase(ownerAccess);
+              boolean ownerReadable = "r".equalsIgnoreCase(ownerAccess) || "rw".equalsIgnoreCase(ownerAccess);
+              String groupName = identityRecord.get(KerberosIdentityDataFile.KEYTAB_FILE_GROUP_NAME);
+              String groupAccess = identityRecord.get(KerberosIdentityDataFileReader.KEYTAB_FILE_GROUP_ACCESS);
+              boolean groupWritable = "w".equalsIgnoreCase(groupAccess) || "rw".equalsIgnoreCase(groupAccess);
+              boolean groupReadable = "r".equalsIgnoreCase(groupAccess) || "rw".equalsIgnoreCase(groupAccess);
+
+              ShellCommandUtil.Result result;
+              String message;
+
+              result = ShellCommandUtil.setFileOwner(keytabFilePath, ownerName);
+              if (result.isSuccessful()) {
+                message = String.format("Updated the owner of the keytab file at %s to %s",
+                    keytabFilePath, ownerName);
+                LOG.info(message);
+                actionLog.writeStdOut(message);
+              } else {
+                message = String.format("Failed to update the owner of the keytab file at %s to %s: %s",
+                    keytabFilePath, ownerName, result.getStderr());
+                LOG.error(message);
+                actionLog.writeStdOut(message);
+                actionLog.writeStdErr(message);
+              }
+
+              result = ShellCommandUtil.setFileGroup(keytabFilePath, groupName);
+              if (result.isSuccessful()) {
+                message = String.format("Updated the group of the keytab file at %s to %s",
+                    keytabFilePath, groupName);
+                LOG.info(message);
+                actionLog.writeStdOut(message);
+              } else {
+                message = String.format("Failed to update the group of the keytab file at %s to %s: %s",
+                    keytabFilePath, groupName, result.getStderr());
+                LOG.error(message);
+                actionLog.writeStdOut(message);
+                actionLog.writeStdErr(message);
+              }
+
+              result = ShellCommandUtil.setFileMode(keytabFilePath,
+                  ownerReadable, ownerWritable, false,
+                  groupReadable, groupWritable, false,
+                  false, false, false);
+              if (result.isSuccessful()) {
+                message = String.format("Updated the access mode of the keytab file at %s to owner:'%s' and group:'%s'",
+                    keytabFilePath, ownerAccess, groupAccess);
+                LOG.info(message);
+                actionLog.writeStdOut(message);
+              } else {
+                message = String.format("Failed to update the access mode of the keytab file at %s to owner:'%s' and group:'%s': %s",
+                    keytabFilePath, ownerAccess, groupAccess, result.getStderr());
+                LOG.error(message);
+                actionLog.writeStdOut(message);
+                actionLog.writeStdErr(message);
+              }
+
+              visited.add(keytabFilePath);
+            }
+          }
+        }
+      }
+    }
+
     return null;
   }
 
@@ -93,20 +186,27 @@ public class FinalizeKerberosServerAction extends KerberosServerAction {
 
             sch.setSecurityState(sch.getDesiredSecurityState());
             ChangeSecurityStateKerberosAuditEvent auditEvent = ChangeSecurityStateKerberosAuditEvent.builder()
-              .withTimestamp(System.currentTimeMillis())
-              .withService(sch.getServiceName())
-              .withComponent(sch.getServiceComponentName())
-              .withHostName(sch.getHostName())
-              .withState(sch.getDesiredSecurityState().toString())
-              .withRequestId(getHostRoleCommand().getRequestId())
-              .withTaskId(getHostRoleCommand().getTaskId())
-              .build();
+                .withTimestamp(System.currentTimeMillis())
+                .withService(sch.getServiceName())
+                .withComponent(sch.getServiceComponentName())
+                .withHostName(sch.getHostName())
+                .withState(sch.getDesiredSecurityState().toString())
+                .withRequestId(getHostRoleCommand().getRequestId())
+                .withTaskId(getHostRoleCommand().getTaskId())
+                .build();
             auditLog(auditEvent);
           }
         }
       }
     }
 
+    // Ensure the keytab files for the Ambari identities have the correct permissions
    // This is important in the event a secure cluster was created via Blueprints since some
    // user accounts and groups may not have been created when the keytab files were created.
+    requestSharedDataContext.put(this.getClass().getName() + "_visited", new HashSet<String>());
+    processIdentities(requestSharedDataContext);
+    requestSharedDataContext.remove(this.getClass().getName() + "_visited");
+
     // Make sure this is a relevant directory. We don't want to accidentally allow _ANY_ directory
     // to be deleted.
     if ((dataDirectoryPath != null) && dataDirectoryPath.contains("/" + DATA_DIRECTORY_PREFIX)) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bf20690/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandler.java
index 02cbb57..51d1398 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandler.java
@@ -409,7 +409,7 @@ public abstract class KerberosOperationHandler {
    * @return true if the keytab file was successfully created; false otherwise
    * @throws KerberosOperationException
    */
-  protected boolean createKeytabFile(Keytab keytab, File destinationKeytabFile)
+  public boolean createKeytabFile(Keytab keytab, File destinationKeytabFile)
       throws KerberosOperationException {
 
     if (destinationKeytabFile == null) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bf20690/ambari-server/src/main/java/org/apache/ambari/server/utils/ShellCommandUtil.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/utils/ShellCommandUtil.java b/ambari-server/src/main/java/org/apache/ambari/server/utils/ShellCommandUtil.java
index 39981ef..8aa9c08 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/utils/ShellCommandUtil.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/utils/ShellCommandUtil.java
@@ -17,6 +17,7 @@
  */
 package org.apache.ambari.server.utils;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
@@ -39,6 +40,16 @@ public class ShellCommandUtil {
   private static final String KEY_TOKEN = "-key ";
   private static final String AMBARI_SUDO = "ambari-sudo.sh";
 
+  private static final int MODE_OWNER_READABLE = 400;
+  private static final int MODE_OWNER_WRITABLE = 200;
+  private static final int MODE_OWNER_EXECUTABLE = 100;
+  private static final int MODE_GROUP_READABLE = 40;
+  private static final int MODE_GROUP_WRITABLE = 20;
+  private static final int MODE_GROUP_EXECUTABLE = 10;
+  private static final int MODE_OTHER_READABLE = 4;
+  private static final int MODE_OTHER_WRITABLE = 2;
+  private static final int MODE_OTHER_EXECUTABLE = 1;
+
   /*
   public static String LogAndReturnOpenSslExitCode(String command, int exitCode) {
     logOpenSslExitCode(command, exitCode);
@@ -181,6 +192,116 @@ public class ShellCommandUtil {
   }
 
   /**
+   * Sets the owner for a file.
+   *
+   * @param path      the path to the file
+   * @param ownerName the owner's local username
+   * @return the result of the operation
+   */
+  public static Result setFileOwner(String path, String ownerName) {
+    if (LINUX) {
+      // Set the file owner, if the owner's username is given
+      if (!StringUtils.isEmpty(ownerName)) {
+        try {
+          return runCommand(new String[]{"chown", ownerName, path}, null, null, true);
+        } catch (IOException e) {
+          // Improbable
+          LOG.warn(String.format("Can not perform chown %s %s", ownerName, path), e);
+          return new Result(-1, "", "Cannot perform operation: " + e.getLocalizedMessage());
+        } catch (InterruptedException e) {
+          LOG.warn(String.format("Can not perform chown %s %s", ownerName, path), e);
+          return new Result(-1, "", "Cannot perform operation: " + e.getLocalizedMessage());
+        }
+      } else {
+        return new Result(0, "", "");
+      }
+    } else {
+      LOG.debug(String.format("Not performing chown command for file %s " +
+          "because current OS is not Linux ", path));
+      return new Result(-1, "", "Cannot perform operation: The current OS is not Linux");
+    }
+  }
+
+  /**
+   * Sets the group for a file.
+   *
+   * @param path      the path to the file
+   * @param groupName the group name
+   * @return the result of the operation
+   */
+  public static Result setFileGroup(String path,  String groupName) {
+    if (LINUX) {
+      // Set the file's group, if the group name is given
+      if (!StringUtils.isEmpty(groupName)) {
+        try {
+          return runCommand(new String[]{"chgrp", groupName, path}, null, null, true);
+        } catch (IOException e) {
+          // Improbable
+          LOG.warn(String.format("Can not perform chgrp %s %s", groupName, path), e);
+          return new Result(-1, "", "Cannot perform operation: " + e.getLocalizedMessage());
+        } catch (InterruptedException e) {
+          LOG.warn(String.format("Can not perform chgrp %s %s", groupName, path), e);
+          return new Result(-1, "", "Cannot perform operation: " + e.getLocalizedMessage());
+        }
+      } else {
+        return new Result(0, "", "");
+      }
+    } else {
+      LOG.debug(String.format("Not performing chgrp command for file %s " +
+          "because current OS is not Linux ", path));
+      return new Result(-1, "", "Cannot perform operation: The current OS is not Linux");
+    }
+  }
+
+  /**
+   * Sets the access mode for a file.
+   *
+   * @param path            the path to the file
+   * @param ownerReadable   true if the owner should be able to read this file; otherwise false
+   * @param ownerWritable   true if the owner should be able to write to this file; otherwise false
+   * @param ownerExecutable true if the owner should be able to execute this file; otherwise false
+   * @param groupReadable   true if the group should be able to read this file; otherwise false
+   * @param groupWritable   true if the group should be able to write to this file; otherwise false
+   * @param groupExecutable true if the group should be able to execute this file; otherwise false
+   * @param otherReadable   true if other users should be able to read this file; otherwise false
+   * @param otherWritable   true if other users should be able to write to this file; otherwise false
+   * @param otherExecutable true if other users should be able to execute this file; otherwise false
+   * @return the result of the operation
+   */
+  public static Result setFileMode(String path,
+                                   boolean ownerReadable, boolean ownerWritable, boolean ownerExecutable,
+                                   boolean groupReadable, boolean groupWritable, boolean groupExecutable,
+                                   boolean otherReadable, boolean otherWritable, boolean otherExecutable) {
+    if (LINUX) {
+      int modeValue = ((ownerReadable) ? MODE_OWNER_READABLE : 0) +
+          ((ownerWritable) ? MODE_OWNER_WRITABLE : 0) +
+          ((ownerExecutable) ? MODE_OWNER_EXECUTABLE : 0) +
+          ((groupReadable) ? MODE_GROUP_READABLE : 0) +
+          ((groupWritable) ? MODE_GROUP_WRITABLE : 0) +
+          ((groupExecutable) ? MODE_GROUP_EXECUTABLE : 0) +
+          ((otherReadable) ? MODE_OTHER_READABLE : 0) +
+          ((otherWritable) ? MODE_OTHER_WRITABLE : 0) +
+          ((otherExecutable) ? MODE_OTHER_EXECUTABLE : 0);
+      String mode = String.format("%04d", modeValue);
+
+      try {
+        return runCommand(new String[]{"chmod", mode, path}, null, null, true);
+      } catch (IOException e) {
+        // Improbable
+        LOG.warn(String.format("Can not perform chmod %s %s", mode, path), e);
+        return new Result(-1, "", "Cannot perform operation: " + e.getLocalizedMessage());
+      } catch (InterruptedException e) {
+        LOG.warn(String.format("Can not perform chmod %s %s", mode, path), e);
+        return new Result(-1, "", "Cannot perform operation: " + e.getLocalizedMessage());
+      }
+    } else {
+      LOG.debug(String.format("Not performing chmod command for file %s " +
+          "because current OS is not Linux ", path));
+      return new Result(-1, "", "Cannot perform operation: The current OS is not Linux");
+    }
+  }
+
+  /**
    * Test if a file or directory exists
    *
    * @param path the path to test

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bf20690/ambari-server/src/main/resources/stacks/HDP/2.0.6/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/kerberos.json
index 3aad080..0644f57 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/kerberos.json
@@ -9,7 +9,7 @@
       "name": "spnego",
       "principal": {
         "value": "HTTP/_HOST@${realm}",
-        "type" : "service"
+        "type": "service"
       },
       "keytab": {
         "file": "${keytab_dir}/spnego.service.keytab",
@@ -27,9 +27,9 @@
       "name": "smokeuser",
       "principal": {
         "value": "${cluster-env/smokeuser}-${cluster_name|toLower()}@${realm}",
-        "type" : "user",
+        "type": "user",
         "configuration": "cluster-env/smokeuser_principal_name",
-        "local_username" : "${cluster-env/smokeuser}"
+        "local_username": "${cluster-env/smokeuser}"
       },
       "keytab": {
         "file": "${keytab_dir}/smokeuser.headless.keytab",
@@ -48,13 +48,15 @@
       "name": "ambari-server",
       "principal": {
         "value": "ambari-server-${cluster_name|toLower()}@${realm}",
-        "type" : "user",
+        "type": "user",
         "configuration": "cluster-env/ambari_principal_name"
       },
       "keytab": {
-        "file": "${keytab_dir}/ambari.server.keytab"
+        "file": "${keytab_dir}/ambari.server.keytab",
+        "owner": {
+          "access": "r"
+        }
       }
     }
   ]
-
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bf20690/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
index 7e6a056..b2eb738 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
@@ -37,16 +37,19 @@ import org.apache.ambari.server.audit.AuditLogger;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.internal.RequestStageContainer;
 import org.apache.ambari.server.controller.spi.ClusterController;
+import org.apache.ambari.server.controller.utilities.KerberosChecker;
 import org.apache.ambari.server.metadata.RoleCommandOrder;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.dao.ArtifactDAO;
 import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
+import org.apache.ambari.server.orm.dao.KerberosPrincipalDAO;
 import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.security.credential.PrincipalKeyCredential;
 import org.apache.ambari.server.security.encryption.CredentialStoreService;
 import org.apache.ambari.server.security.encryption.CredentialStoreServiceImpl;
 import org.apache.ambari.server.security.encryption.CredentialStoreType;
 import org.apache.ambari.server.serveraction.ActionLog;
+import org.apache.ambari.server.serveraction.kerberos.ConfigureAmbariIdentitiesServerAction;
 import org.apache.ambari.server.serveraction.kerberos.CreateKeytabFilesServerAction;
 import org.apache.ambari.server.serveraction.kerberos.CreatePrincipalsServerAction;
 import org.apache.ambari.server.serveraction.kerberos.KDCType;
@@ -94,9 +97,13 @@ import org.easymock.EasyMockSupport;
 import org.easymock.IAnswer;
 import org.junit.After;
 import org.junit.Before;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
 
 import javax.persistence.EntityManager;
+import java.io.File;
+import java.lang.reflect.Method;
 import java.net.InetAddress;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
@@ -113,6 +120,7 @@ import java.util.concurrent.TimeUnit;
 
 import static org.easymock.EasyMock.anyLong;
 import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.anyString;
 import static org.easymock.EasyMock.capture;
 import static org.easymock.EasyMock.eq;
 import static org.easymock.EasyMock.expect;
@@ -139,6 +147,9 @@ public class KerberosHelperTest extends EasyMockSupport {
   private final TopologyManager topologyManager = createMock(TopologyManager.class);
   private final Configuration configuration = createMock(Configuration.class);
 
+  @Rule
+  public TemporaryFolder temporaryFolder = new TemporaryFolder();
+
   @Before
   public void setUp() throws Exception {
     reset(clusterController);
@@ -183,9 +194,22 @@ public class KerberosHelperTest extends EasyMockSupport {
           public boolean removePrincipal(String principal) throws KerberosOperationException {
             return false;
           }
+
+          @Override
+          public boolean createKeytabFile(Keytab keytab, File destinationKeytabFile) throws KerberosOperationException {
+            return true;
+          }
         })
         .anyTimes();
 
+    Method methodGetConfiguredTemporaryDirectory = KerberosHelperImpl.class.getDeclaredMethod("getConfiguredTemporaryDirectory");
+
+    final KerberosHelperImpl kerberosHelper = createMockBuilder(KerberosHelperImpl.class)
+        .addMockedMethod(methodGetConfiguredTemporaryDirectory)
+        .createMock();
+
+    expect(kerberosHelper.getConfiguredTemporaryDirectory()).andReturn(temporaryFolder.getRoot()).anyTimes();
+
     injector = Guice.createInjector(new AbstractModule() {
 
       @Override
@@ -210,14 +234,18 @@ public class KerberosHelperTest extends EasyMockSupport {
         bind(KerberosDescriptorFactory.class).toInstance(kerberosDescriptorFactory);
         bind(KerberosConfigDataFileWriterFactory.class).toInstance(kerberosConfigDataFileWriterFactory);
         bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));
-        bind(KerberosHelper.class).to(KerberosHelperImpl.class);
+        bind(KerberosHelper.class).toInstance(kerberosHelper);
         bind(CredentialStoreService.class).to(CredentialStoreServiceImpl.class);
         bind(CreatePrincipalsServerAction.class).toInstance(createMock(CreatePrincipalsServerAction.class));
         bind(CreateKeytabFilesServerAction.class).toInstance(createMock(CreateKeytabFilesServerAction.class));
+        bind(ConfigureAmbariIdentitiesServerAction.class).toInstance(createMock(ConfigureAmbariIdentitiesServerAction.class));
         bind(StackAdvisorHelper.class).toInstance(createMock(StackAdvisorHelper.class));
         bind(HostRoleCommandDAO.class).toInstance(createNiceMock(HostRoleCommandDAO.class));
         bind(AuditLogger.class).toInstance(createNiceMock(AuditLogger.class));
         bind(ArtifactDAO.class).toInstance(createNiceMock(ArtifactDAO.class));
+        bind(KerberosPrincipalDAO.class).toInstance(createNiceMock(KerberosPrincipalDAO.class));
+
+        requestStaticInjection(KerberosChecker.class);
       }
     });
 
@@ -225,10 +253,11 @@ public class KerberosHelperTest extends EasyMockSupport {
     StageUtils.setTopologyManager(topologyManager);
     expect(topologyManager.getPendingHostComponents()).andReturn(
         Collections.<String, Collection<String>>emptyMap()).anyTimes();
-    
+
     StageUtils.setConfiguration(configuration);
     expect(configuration.getApiSSLAuthentication()).andReturn(false).anyTimes();
     expect(configuration.getClientApiPort()).andReturn(8080).anyTimes();
+    expect(configuration.getServerTempDir()).andReturn(temporaryFolder.getRoot().getAbsolutePath()).anyTimes();
 
     CredentialStoreService credentialStoreService = injector.getInstance(CredentialStoreService.class);
     if (!credentialStoreService.isInitialized(CredentialStoreType.TEMPORARY)) {
@@ -1007,7 +1036,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     final Config krb5ConfConfig = createMock(Config.class);
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).anyTimes();
 
-    final Cluster cluster = createMockCluster(Collections.singleton(host), SecurityType.KERBEROS, krb5ConfConfig, kerberosEnvConfig);
+    final Cluster cluster = createMockCluster("c1", Collections.singleton(host), SecurityType.KERBEROS, krb5ConfConfig, kerberosEnvConfig);
     expect(cluster.getServices())
         .andReturn(new HashMap<String, Service>() {
           {
@@ -1198,7 +1227,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     final Config krb5ConfConfig = createMock(Config.class);
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).anyTimes();
 
-    final Cluster cluster = createMockCluster(Collections.singleton(host), SecurityType.KERBEROS, krb5ConfConfig, kerberosEnvConfig);
+    final Cluster cluster = createMockCluster("c1", Collections.singleton(host), SecurityType.KERBEROS, krb5ConfConfig, kerberosEnvConfig);
     expect(cluster.getServices())
         .andReturn(new HashMap<String, Service>() {
           {
@@ -1384,7 +1413,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     final Config krb5ConfConfig = createMock(Config.class);
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).anyTimes();
 
-    final Cluster cluster = createMockCluster(Collections.singleton(host), SecurityType.NONE, krb5ConfConfig, kerberosEnvConfig);
+    final Cluster cluster = createMockCluster("c1", Collections.singleton(host), SecurityType.NONE, krb5ConfConfig, kerberosEnvConfig);
     expect(cluster.getServices())
         .andReturn(new HashMap<String, Service>() {
           {
@@ -1580,7 +1609,7 @@ public class KerberosHelperTest extends EasyMockSupport {
         ? Arrays.asList(host, hostInvalid)
         : Collections.singleton(host);
 
-    final Cluster cluster = createMockCluster(hosts, SecurityType.KERBEROS, krb5ConfConfig, kerberosEnvConfig);
+    final Cluster cluster = createMockCluster("c1", hosts, SecurityType.KERBEROS, krb5ConfConfig, kerberosEnvConfig);
     expect(cluster.getServices())
         .andReturn(new HashMap<String, Service>() {
           {
@@ -2157,7 +2186,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     setupKerberosDescriptor(kerberosDescriptor, 2);
 
     RecommendationResponse.BlueprintConfigurations coreSiteRecommendation = createNiceMock(RecommendationResponse
-      .BlueprintConfigurations.class);
+        .BlueprintConfigurations.class);
     expect(coreSiteRecommendation.getProperties()).andReturn(Collections.singletonMap("newPropertyRecommendation", "newPropertyRecommendation"));
 
     RecommendationResponse.BlueprintConfigurations newTypeRecommendation = createNiceMock(RecommendationResponse.BlueprintConfigurations.class);
@@ -2248,7 +2277,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     serviceComponentHostMap.put("COMPONENT2B", new HashSet<String>(Arrays.asList("hostB", "hostC")));
     serviceComponentHostMap.put("COMPONEN3A", Collections.singleton("hostA"));
 
-    final Cluster cluster = createMockCluster(hosts, SecurityType.KERBEROS, krb5ConfConfig, kerberosEnvConfig);
+    final Cluster cluster = createMockCluster("c1", hosts, SecurityType.KERBEROS, krb5ConfConfig, kerberosEnvConfig);
     expect(cluster.getServices()).andReturn(services).anyTimes();
     expect(cluster.getServiceComponentHostMap(anyObject(Set.class), anyObject(Set.class))).andReturn(serviceComponentHostMap).anyTimes();
 
@@ -2286,7 +2315,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     // Needed by infrastructure
     injector.getInstance(AmbariMetaInfo.class).init();
 
-    HashMap<String,Set<String>> installedServices1 = new HashMap<String, Set<String>>();
+    HashMap<String, Set<String>> installedServices1 = new HashMap<String, Set<String>>();
     installedServices1.put("SERVICE1", new HashSet<String>(Arrays.asList("COMPONENT1A", "COMPONENT1B")));
     installedServices1.put("SERVICE2", new HashSet<String>(Arrays.asList("COMPONENT2A", "COMPONENT2B")));
     installedServices1.put("SERVICE3", Collections.singleton("COMPONENT3A"));
@@ -2294,7 +2323,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     Map<String, Map<String, String>> updates1 = kerberosHelper.getServiceConfigurationUpdates(
         cluster, existingConfigurations, installedServices1, null, null, true, true);
 
-    HashMap<String,Set<String>> installedServices2 = new HashMap<String, Set<String>>();
+    HashMap<String, Set<String>> installedServices2 = new HashMap<String, Set<String>>();
     installedServices2.put("SERVICE1", new HashSet<String>(Arrays.asList("COMPONENT1A", "COMPONENT1B")));
     installedServices2.put("SERVICE3", Collections.singleton("COMPONENT3A"));
 
@@ -2380,7 +2409,7 @@ public class KerberosHelperTest extends EasyMockSupport {
             put("realm", "FOOBAR.COM");
             put("case_insensitive_username_rules", "false");
             put("create_ambari_principal", "false");
-        }
+          }
         });
         put("", new HashMap<String, String>() {
           {
@@ -2413,18 +2442,51 @@ public class KerberosHelperTest extends EasyMockSupport {
 
   @Test
   public void testEnsureHeadlessIdentities() throws Exception {
+    testEnsureHeadlessIdentities(false, false);
+  }
+
+  @Test
+  public void testEnsureHeadlessAndAmbariIdentitiesAsUser() throws Exception {
+    testEnsureHeadlessIdentities(true, false);
+  }
+
+  @Test
+  public void testEnsureHeadlessAndAmbariIdentitiesAsService() throws Exception {
+    testEnsureHeadlessIdentities(true, true);
+  }
+
+  private void testEnsureHeadlessIdentities(boolean createAmbariPrincipal, boolean ambariServerPrincipalAsService) throws Exception {
+    String clusterName = "c1";
+    String realm = "EXAMPLE.COM";
+    String ambariServerHostname = StageUtils.getHostName();
+    String ambariServerPrincipalName;
+    String ambariServerKeytabFilePath = new File("ambari.server.keytab").getAbsolutePath();
+    KerberosPrincipalType ambariServerPrincipalType;
+    String ambariServerPrincipalNameExpected;
+
+    if(ambariServerPrincipalAsService) {
+      ambariServerPrincipalName = "ambari-server-${cluster_name}/_HOST@${realm}";
+      ambariServerPrincipalType = KerberosPrincipalType.SERVICE;
+      ambariServerPrincipalNameExpected = String.format("ambari-server-%s/%s@%s", clusterName, ambariServerHostname, realm);
+    }
+    else {
+      ambariServerPrincipalName = "ambari-server-${cluster_name}@${realm}";
+      ambariServerPrincipalType = KerberosPrincipalType.USER;
+      ambariServerPrincipalNameExpected = String.format("ambari-server-%s@%s", clusterName, realm);
+    }
+
     Map<String, String> propertiesKrb5Conf = new HashMap<String, String>();
 
     Map<String, String> propertiesKerberosEnv = new HashMap<String, String>();
-    propertiesKerberosEnv.put("realm", "EXAMPLE.COM");
+    propertiesKerberosEnv.put("realm", realm);
     propertiesKerberosEnv.put("kdc_type", "mit-kdc");
     propertiesKerberosEnv.put("password_length", "20");
     propertiesKerberosEnv.put("password_min_lowercase_letters", "1");
     propertiesKerberosEnv.put("password_min_uppercase_letters", "1");
     propertiesKerberosEnv.put("password_min_digits", "1");
     propertiesKerberosEnv.put("password_min_punctuation", "0");
-    propertiesKerberosEnv.put("password_min_whitespace","0");
-    propertiesKerberosEnv.put("create_ambari_principal", "false");
+    propertiesKerberosEnv.put("password_min_whitespace", "0");
+    propertiesKerberosEnv.put("create_ambari_principal", (createAmbariPrincipal) ? "true" : "false");
 
     Config configKrb5Conf = createMock(Config.class);
     expect(configKrb5Conf.getProperties()).andReturn(propertiesKrb5Conf).times(1);
@@ -2455,7 +2517,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     servicesMap.put("SERVICE1", service1);
     servicesMap.put("SERVICE2", service2);
 
-    Cluster cluster = createMockCluster(Arrays.asList(host1, host2, host3), SecurityType.KERBEROS, configKrb5Conf, configKerberosEnv);
+    Cluster cluster = createMockCluster(clusterName, Arrays.asList(host1, host2, host3), SecurityType.KERBEROS, configKrb5Conf, configKerberosEnv);
     expect(cluster.getServices()).andReturn(servicesMap).times(1);
 
     Map<String, String> kerberosDescriptorProperties = new HashMap<String, String>();
@@ -2514,6 +2576,36 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(kerberosDescriptor.getService("SERVICE1")).andReturn(service1KerberosDescriptor).times(1);
     expect(kerberosDescriptor.getService("SERVICE2")).andReturn(service2KerberosDescriptor).times(1);
 
+    if (createAmbariPrincipal) {
+      String spnegoPrincipalNameExpected = String.format("HTTP/%s@%s", ambariServerHostname, realm);
+
+      KerberosIdentityDescriptor ambariIdentity = createMockIdentityDescriptor(
+          KerberosHelper.AMBARI_IDENTITY_NAME,
+          createMockPrincipalDescriptor(ambariServerPrincipalName, ambariServerPrincipalType, "ambari", null),
+          createMockKeytabDescriptor(ambariServerKeytabFilePath, null));
+
+      KerberosIdentityDescriptor spnegoIdentity = createMockIdentityDescriptor(
+          KerberosHelper.SPNEGO_IDENTITY_NAME,
+          createMockPrincipalDescriptor("HTTP/_HOST@${realm}", KerberosPrincipalType.SERVICE, null, null),
+          createMockKeytabDescriptor("spnego.service.keytab", null));
+
+      expect(kerberosDescriptor.getIdentity(KerberosHelper.AMBARI_IDENTITY_NAME)).andReturn(ambariIdentity).once();
+      expect(kerberosDescriptor.getIdentity(KerberosHelper.SPNEGO_IDENTITY_NAME)).andReturn(spnegoIdentity).once();
+
+      ConfigureAmbariIdentitiesServerAction configureAmbariIdentitiesServerAction = injector.getInstance(ConfigureAmbariIdentitiesServerAction.class);
+      expect(configureAmbariIdentitiesServerAction.installAmbariServerIdentity(eq(ambariServerPrincipalNameExpected), anyString(), eq(ambariServerKeytabFilePath),
+          eq("user1"), eq(true), eq(true), eq("groupA"), eq(true), eq(false), (ActionLog) eq(null)))
+          .andReturn(true)
+          .once();
+      expect(configureAmbariIdentitiesServerAction.installAmbariServerIdentity(eq(spnegoPrincipalNameExpected), anyString(), eq("spnego.service.keytab"),
+          eq("user1"), eq(true), eq(true), eq("groupA"), eq(true), eq(false), (ActionLog) eq(null)))
+          .andReturn(true)
+          .once();
+
+      configureAmbariIdentitiesServerAction.configureJAAS(ambariServerPrincipalNameExpected, ambariServerKeytabFilePath, null);
+      expectLastCall().once();
+    }
+
     setupKerberosDescriptor(kerberosDescriptor, 1);
 
     Map<String, Map<String, String>> existingConfigurations = new HashMap<String, Map<String, String>>();
@@ -2530,15 +2622,30 @@ public class KerberosHelperTest extends EasyMockSupport {
     Capture<? extends String> capturePrincipalForKeytab = newCapture(CaptureType.ALL);
 
     CreatePrincipalsServerAction createPrincipalsServerAction = injector.getInstance(CreatePrincipalsServerAction.class);
-    expect(createPrincipalsServerAction.createPrincipal(capture(capturePrincipal), eq(false), anyObject(Map.class),  anyObject(KerberosOperationHandler.class), eq(false), isNull(ActionLog.class)))
+    expect(createPrincipalsServerAction.createPrincipal(capture(capturePrincipal), eq(false), anyObject(Map.class), anyObject(KerberosOperationHandler.class), eq(false), isNull(ActionLog.class)))
         .andReturn(new CreatePrincipalsServerAction.CreatePrincipalResult("anything", "password", 1))
         .times(3);
 
+    if(createAmbariPrincipal) {
+      if (ambariServerPrincipalAsService) {
+        expect(createPrincipalsServerAction.createPrincipal(capture(capturePrincipal), eq(true), anyObject(Map.class), anyObject(KerberosOperationHandler.class), eq(false), isNull(ActionLog.class)))
+            .andReturn(new CreatePrincipalsServerAction.CreatePrincipalResult("anything", "password", 1))
+            .times(2);
+      } else {
+        expect(createPrincipalsServerAction.createPrincipal(capture(capturePrincipal), eq(true), anyObject(Map.class), anyObject(KerberosOperationHandler.class), eq(false), isNull(ActionLog.class)))
+            .andReturn(new CreatePrincipalsServerAction.CreatePrincipalResult("anything", "password", 1))
+            .times(1);
+        expect(createPrincipalsServerAction.createPrincipal(capture(capturePrincipal), eq(false), anyObject(Map.class), anyObject(KerberosOperationHandler.class), eq(false), isNull(ActionLog.class)))
+            .andReturn(new CreatePrincipalsServerAction.CreatePrincipalResult("anything", "password", 1))
+            .times(1);
+      }
+    }
+
     CreateKeytabFilesServerAction createKeytabFilesServerAction = injector.getInstance(CreateKeytabFilesServerAction.class);
     expect(createKeytabFilesServerAction.createKeytab(capture(capturePrincipalForKeytab), eq("password"), eq(1), anyObject(KerberosOperationHandler.class), eq(true), eq(true), isNull(ActionLog.class)))
         .andReturn(new Keytab())
-        .times(3);
-    
+        .times(createAmbariPrincipal ? 5 : 3);
+
     replayAll();
 
     AmbariMetaInfo ambariMetaInfo = injector.getInstance(AmbariMetaInfo.class);
@@ -2546,7 +2653,7 @@ public class KerberosHelperTest extends EasyMockSupport {
 
     CredentialStoreService credentialStoreService = injector.getInstance(CredentialStoreService.class);
     credentialStoreService.setCredential(cluster.getClusterName(), KerberosHelper.KDC_ADMINISTRATOR_CREDENTIAL_ALIAS,
-      new PrincipalKeyCredential("principal", "password"), CredentialStoreType.TEMPORARY);
+        new PrincipalKeyCredential("principal", "password"), CredentialStoreType.TEMPORARY);
 
     KerberosHelper kerberosHelper = injector.getInstance(KerberosHelper.class);
     kerberosHelper.ensureHeadlessIdentities(cluster, existingConfigurations, services);
@@ -2554,16 +2661,26 @@ public class KerberosHelperTest extends EasyMockSupport {
     verifyAll();
 
     List<? extends String> capturedPrincipals = capturePrincipal.getValues();
-    assertEquals(3, capturedPrincipals.size());
+    assertEquals(createAmbariPrincipal ? 5 : 3, capturedPrincipals.size());
     assertTrue(capturedPrincipals.contains("s1_1@EXAMPLE.COM"));
     assertTrue(capturedPrincipals.contains("s1c1_1@EXAMPLE.COM"));
     assertTrue(capturedPrincipals.contains("s2_1@EXAMPLE.COM"));
 
     List<? extends String> capturedPrincipalsForKeytab = capturePrincipalForKeytab.getValues();
-    assertEquals(3, capturedPrincipalsForKeytab.size());
+    assertEquals(createAmbariPrincipal ? 5 : 3, capturedPrincipalsForKeytab.size());
     assertTrue(capturedPrincipalsForKeytab.contains("s1_1@EXAMPLE.COM"));
     assertTrue(capturedPrincipalsForKeytab.contains("s1c1_1@EXAMPLE.COM"));
     assertTrue(capturedPrincipalsForKeytab.contains("s2_1@EXAMPLE.COM"));
+
+    if(createAmbariPrincipal) {
+      String spnegoPrincipalName = String.format("HTTP/%s@EXAMPLE.COM", ambariServerHostname);
+
+      assertTrue(capturedPrincipals.contains(ambariServerPrincipalNameExpected));
+      assertTrue(capturedPrincipals.contains(spnegoPrincipalName));
+
+      assertTrue(capturedPrincipalsForKeytab.contains(ambariServerPrincipalNameExpected));
+      assertTrue(capturedPrincipalsForKeytab.contains(spnegoPrincipalName));
+    }
   }
 
   /**
@@ -2598,7 +2715,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     Map<String, Service> servicesMap = new HashMap<String, Service>();
     servicesMap.put("SERVICE1", service1);
 
-    Cluster cluster = createMockCluster(Arrays.asList(host1), SecurityType.KERBEROS, configKrb5Conf, configKerberosEnv);
+    Cluster cluster = createMockCluster("c1", Arrays.asList(host1), SecurityType.KERBEROS, configKrb5Conf, configKerberosEnv);
     expect(cluster.getServices()).andReturn(servicesMap).times(1);
 
     Map<String, String> kerberosDescriptorProperties = new HashMap<String, String>();
@@ -2769,7 +2886,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     final Config krb5ConfConfig = createMock(Config.class);
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).anyTimes();
 
-    final Cluster cluster = createMockCluster(Arrays.asList(hostA, hostB, hostC), SecurityType.KERBEROS, krb5ConfConfig, kerberosEnvConfig);
+    final Cluster cluster = createMockCluster("c1", Arrays.asList(hostA, hostB, hostC), SecurityType.KERBEROS, krb5ConfConfig, kerberosEnvConfig);
     expect(cluster.getServices())
         .andReturn(new HashMap<String, Service>() {
           {
@@ -3014,7 +3131,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     final Config krb5ConfConfig = createMock(Config.class);
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).anyTimes();
 
-    final Cluster cluster = createMockCluster(Collections.singleton(host), SecurityType.KERBEROS, krb5ConfConfig, kerberosEnvConfig);
+    final Cluster cluster = createMockCluster("c1", Collections.singleton(host), SecurityType.KERBEROS, krb5ConfConfig, kerberosEnvConfig);
     expect(cluster.getServices())
         .andReturn(new HashMap<String, Service>() {
           {
@@ -3146,10 +3263,10 @@ public class KerberosHelperTest extends EasyMockSupport {
     KerberosHelper kerberosHelper = injector.getInstance(KerberosHelper.class);
     boolean managingIdentities = !Boolean.FALSE.equals(manageIdentities);
 
-    final Map<String, String> kerberosEnvProperties = new HashMap<String,String>();
-    kerberosEnvProperties.put("kdc_type","mit-kdc");
-    kerberosEnvProperties.put("realm","FOOBAR.COM");
-    kerberosEnvProperties.put("manage_identities","FOOBAR.COM");
+    final Map<String, String> kerberosEnvProperties = new HashMap<String, String>();
+    kerberosEnvProperties.put("kdc_type", "mit-kdc");
+    kerberosEnvProperties.put("realm", "FOOBAR.COM");
+    kerberosEnvProperties.put("manage_identities", "FOOBAR.COM");
     kerberosEnvProperties.put("manage_identities",
         (manageIdentities == null)
             ? null
@@ -3378,7 +3495,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     final Config krb5ConfConfig = createMock(Config.class);
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).anyTimes();
 
-    final Cluster cluster = createMockCluster(Collections.singleton(host), SecurityType.KERBEROS, krb5ConfConfig, kerberosEnvConfig);
+    final Cluster cluster = createMockCluster("c1", Collections.singleton(host), SecurityType.KERBEROS, krb5ConfConfig, kerberosEnvConfig);
     expect(cluster.getServices())
         .andReturn(new HashMap<String, Service>() {
           {
@@ -3558,8 +3675,8 @@ public class KerberosHelperTest extends EasyMockSupport {
         })
         .anyTimes();
     expect(cluster.getServiceComponentHosts(InetAddress.getLocalHost().getCanonicalHostName().toLowerCase()))
-      .andReturn(new ArrayList<ServiceComponentHost>())
-      .anyTimes();
+        .andReturn(new ArrayList<ServiceComponentHost>())
+        .anyTimes();
 
     final Map<String, String> kerberosEnvProperties = new HashMap<String, String>() {
       {
@@ -3579,12 +3696,12 @@ public class KerberosHelperTest extends EasyMockSupport {
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).anyTimes();
 
     expect(cluster.getDesiredConfigByType("krb5-conf"))
-      .andReturn(krb5ConfConfig)
-      .anyTimes();
+        .andReturn(krb5ConfConfig)
+        .anyTimes();
 
     expect(cluster.getDesiredConfigByType("kerberos-env"))
-      .andReturn(kerberosEnvConfig)
-      .anyTimes();
+        .andReturn(kerberosEnvConfig)
+        .anyTimes();
 
     expect(cluster.getCurrentStackVersion())
         .andReturn(new StackId("HDP", "2.2"))
@@ -3786,6 +3903,10 @@ public class KerberosHelperTest extends EasyMockSupport {
     KerberosKeytabDescriptor descriptor = createMock(KerberosKeytabDescriptor.class);
     expect(descriptor.getFile()).andReturn(file).anyTimes();
     expect(descriptor.getConfiguration()).andReturn(configuration).anyTimes();
+    expect(descriptor.getOwnerName()).andReturn("user1").anyTimes();
+    expect(descriptor.getOwnerAccess()).andReturn("rw").anyTimes();
+    expect(descriptor.getGroupName()).andReturn("groupA").anyTimes();
+    expect(descriptor.getGroupAccess()).andReturn("r").anyTimes();
     return descriptor;
   }
 
@@ -3866,14 +3987,14 @@ public class KerberosHelperTest extends EasyMockSupport {
     return host;
   }
 
-  private Cluster createMockCluster(Collection<Host> hosts, SecurityType securityType, Config krb5ConfConfig, Config kerberosEnvConfig) {
+  private Cluster createMockCluster(String clusterName, Collection<Host> hosts, SecurityType securityType, Config krb5ConfConfig, Config kerberosEnvConfig) {
     Cluster cluster = createMock(Cluster.class);
     expect(cluster.getHosts()).andReturn(hosts).anyTimes();
     expect(cluster.getClusterId()).andReturn(1L).anyTimes();
     expect(cluster.getSecurityType()).andReturn(securityType).anyTimes();
     expect(cluster.getDesiredConfigByType("krb5-conf")).andReturn(krb5ConfConfig).anyTimes();
     expect(cluster.getDesiredConfigByType("kerberos-env")).andReturn(kerberosEnvConfig).anyTimes();
-    expect(cluster.getClusterName()).andReturn("c1").anyTimes();
+    expect(cluster.getClusterName()).andReturn(clusterName).anyTimes();
     expect(cluster.getCurrentStackVersion())
         .andReturn(new StackId("HDP", "2.2"))
         .anyTimes();


[2/8] ambari git commit: AMBARI-18463. Regression: krb5JAASLogin.conf is not updated during secure BP install (rlevas)

Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/1bf20690/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerActionTest.java
new file mode 100644
index 0000000..439bcae
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/ConfigureAmbariIdentitiesServerActionTest.java
@@ -0,0 +1,204 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.serveraction.kerberos;
+
+import com.google.inject.AbstractModule;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import junit.framework.Assert;
+import org.apache.ambari.server.audit.AuditLogger;
+import org.apache.ambari.server.controller.KerberosHelper;
+import org.apache.ambari.server.orm.DBAccessor;
+import org.apache.ambari.server.orm.dao.HostDAO;
+import org.apache.ambari.server.orm.dao.KerberosPrincipalHostDAO;
+import org.apache.ambari.server.orm.entities.HostEntity;
+import org.apache.ambari.server.serveraction.ActionLog;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.utils.StageUtils;
+import org.apache.commons.io.FileUtils;
+import org.easymock.EasyMockSupport;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import javax.persistence.EntityManager;
+import java.io.File;
+import java.lang.reflect.Method;
+
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+
+public class ConfigureAmbariIdentitiesServerActionTest extends EasyMockSupport {
+  @Rule
+  public TemporaryFolder testFolder = new TemporaryFolder();
+
+  @Test
+  public void installAmbariServerIdentity() throws Exception {
+    installAmbariServerIdentity(createNiceMock(ActionLog.class), true);
+  }
+
+  @Test
+  public void installAmbariServerIdentityWithNoAgentOnAmbariServer() throws Exception {
+    installAmbariServerIdentity(createNiceMock(ActionLog.class), false);
+  }
+
+  @Test
+  public void installAmbariServerIdentityWithNullActionLog() throws Exception {
+    installAmbariServerIdentity(null, true);
+  }
+
+  private void installAmbariServerIdentity(ActionLog actionLog, boolean ambariServerHasAgent) throws Exception {
+
+    String principal = "ambari-server@EXAMPLE.COM";
+    File srcKeytabFile = testFolder.newFile();
+    File destKeytabFile = new File(testFolder.getRoot().getAbsolutePath(), "ambari-server.keytab");
+
+    Injector injector = createInjector();
+
+    HostEntity hostEntity;
+
+    if (ambariServerHasAgent) {
+      KerberosPrincipalHostDAO kerberosPrincipalHostDAO = injector.getInstance(KerberosPrincipalHostDAO.class);
+      expect(kerberosPrincipalHostDAO.exists(principal, 1L)).andReturn(false).once();
+      kerberosPrincipalHostDAO.create(principal, 1L);
+      expectLastCall().once();
+
+      hostEntity = createMock(HostEntity.class);
+      expect(hostEntity.getHostId()).andReturn(1L).once();
+    } else {
+      hostEntity = null;
+    }
+
+    HostDAO hostDAO = injector.getInstance(HostDAO.class);
+    expect(hostDAO.findByName(StageUtils.getHostName())).andReturn(hostEntity).once();
+
+    // Mock the methods that do the actual file manipulation to avoid having to deal with ambari-sudo.sh used in
+    // ShellCommandUtil#mkdir, ShellCommandUtil#copyFile, etc..
+    Method methodCopyFile = ConfigureAmbariIdentitiesServerAction.class.getDeclaredMethod("copyFile",
+        String.class, String.class);
+    Method methodSetFileACL = ConfigureAmbariIdentitiesServerAction.class.getDeclaredMethod("setFileACL",
+        String.class, String.class, boolean.class, boolean.class, String.class, boolean.class, boolean.class);
+
+    ConfigureAmbariIdentitiesServerAction action = createMockBuilder(ConfigureAmbariIdentitiesServerAction.class)
+        .addMockedMethod(methodCopyFile)
+        .addMockedMethod(methodSetFileACL)
+        .createMock();
+
+    action.copyFile(srcKeytabFile.getAbsolutePath(), destKeytabFile.getAbsolutePath());
+    expectLastCall().once();
+
+    action.setFileACL(destKeytabFile.getAbsolutePath(), "user1", true, true, "groupA", true, false);
+    expectLastCall().once();
+
+    replayAll();
+
+    injector.injectMembers(action);
+    action.installAmbariServerIdentity(principal, srcKeytabFile.getAbsolutePath(), destKeytabFile.getAbsolutePath(),
+        "user1", true, true, "groupA", true, false, actionLog);
+
+    verifyAll();
+
+    // There is no need to verify that the file was copied. We are not testing the ability to copy
+    // and we have mocked the method that does the actual copying to avoid having to deal with
+    // ambari-sudo.sh via the ShellCommandUtil class.
+  }
+
+  @Test
+  public void configureJAAS() throws Exception {
+    configureJAAS(createNiceMock(ActionLog.class));
+  }
+
+  @Test
+  public void configureJAASWithNullActionLog() throws Exception {
+    configureJAAS(null);
+  }
+
+  private void configureJAAS(ActionLog actionLog) throws Exception {
+    String principal = "ambari-server@EXAMPLE.COM";
+    String keytabFilePath = "/etc/security/keytabs/ambari.server.keytab";
+
+    File jaasConfFile = testFolder.newFile();
+    File jaasConfFileBak = new File(jaasConfFile.getAbsolutePath() + ".bak");
+    String originalJAASFileContent =
+        "com.sun.security.jgss.krb5.initiate {\n" +
+            "    com.sun.security.auth.module.Krb5LoginModule required\n" +
+            "    renewTGT=false\n" +
+            "    doNotPrompt=true\n" +
+            "    useKeyTab=true\n" +
+            "    keyTab=\"/etc/security/keytabs/ambari.keytab\"\n" +
+            "    principal=\"ambari@EXAMPLE.COM\"\n" +
+            "    storeKey=true\n" +
+            "    useTicketCache=false;\n" +
+            "};\n";
+
+    FileUtils.writeStringToFile(jaasConfFile, originalJAASFileContent);
+
+    Injector injector = createInjector();
+
+    Method methodGetJAASConfFilePath = ConfigureAmbariIdentitiesServerAction.class.getDeclaredMethod("getJAASConfFilePath");
+
+    ConfigureAmbariIdentitiesServerAction action = createMockBuilder(ConfigureAmbariIdentitiesServerAction.class)
+        .addMockedMethod(methodGetJAASConfFilePath)
+        .createMock();
+
+    expect(action.getJAASConfFilePath()).andReturn(jaasConfFile.getAbsolutePath());
+
+    replayAll();
+
+    injector.injectMembers(action);
+    action.configureJAAS(principal, keytabFilePath, actionLog);
+
+    verifyAll();
+
+    Assert.assertEquals(
+        "com.sun.security.jgss.krb5.initiate {\n" +
+            "    com.sun.security.auth.module.Krb5LoginModule required\n" +
+            "    renewTGT=false\n" +
+            "    doNotPrompt=true\n" +
+            "    useKeyTab=true\n" +
+            "    keyTab=\"/etc/security/keytabs/ambari.server.keytab\"\n" +
+            "    principal=\"ambari-server@EXAMPLE.COM\"\n" +
+            "    storeKey=true\n" +
+            "    useTicketCache=false;\n" +
+            "};\n",
+        FileUtils.readFileToString(jaasConfFile)
+    );
+
+    // Ensure the backup file matches the original content
+    Assert.assertEquals(originalJAASFileContent, FileUtils.readFileToString(jaasConfFileBak));
+  }
+
+
+  private Injector createInjector() {
+    return Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
+        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+        bind(AuditLogger.class).toInstance(createNiceMock(AuditLogger.class));
+        bind(Clusters.class).toInstance(createNiceMock(Clusters.class));
+        bind(KerberosHelper.class).toInstance(createNiceMock(KerberosHelper.class));
+
+        bind(HostDAO.class).toInstance(createMock(HostDAO.class));
+        bind(KerberosPrincipalHostDAO.class).toInstance(createMock(KerberosPrincipalHostDAO.class));
+      }
+    });
+  }
+
+}
\ No newline at end of file


[7/8] ambari git commit: AMBARI-18519 - Enable Add/Remove JournalNode on NNHA Wizard Step 2 (rzang)

Posted by jo...@apache.org.
AMBARI-18519 - Enable Add/Remove JournalNode on NNHA Wizard Step 2 (rzang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a442efbd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a442efbd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a442efbd

Branch: refs/heads/branch-feature-AMBARI-18456
Commit: a442efbde22ac635b60b5650df397fd06c147ae1
Parents: 98efb57
Author: Richard Zang <rz...@apache.org>
Authored: Tue Oct 4 14:19:55 2016 -0700
Committer: Richard Zang <rz...@apache.org>
Committed: Tue Oct 4 14:19:55 2016 -0700

----------------------------------------------------------------------
 .../nameNode/step2_controller.js                | 32 +++++++++++++++++++-
 .../admin/highAvailability/nameNode/step3.hbs   | 10 +++---
 2 files changed, 35 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a442efbd/ambari-web/app/controllers/main/admin/highAvailability/nameNode/step2_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/highAvailability/nameNode/step2_controller.js b/ambari-web/app/controllers/main/admin/highAvailability/nameNode/step2_controller.js
index b8d7978..435fe0f 100644
--- a/ambari-web/app/controllers/main/admin/highAvailability/nameNode/step2_controller.js
+++ b/ambari-web/app/controllers/main/admin/highAvailability/nameNode/step2_controller.js
@@ -34,7 +34,37 @@ App.HighAvailabilityWizardStep2Controller = Em.Controller.extend(App.BlueprintMi
 
   showAdditionalPrefix: ['NAMENODE'],
 
-  showInstalledMastersFirst: true
+  showInstalledMastersFirst: true,
+
+  JOURNALNODES_COUNT_MINIMUM: 3, // TODO get this from stack
+  
+  renderComponents: function(masterComponents) {
+    this._super(masterComponents);
+    this.showHideJournalNodesAddRemoveControl();
+  },
+
+  addComponent: function(componentName) {
+    this._super(componentName);
+    this.showHideJournalNodesAddRemoveControl();
+  },
+
+  removeComponent: function(componentName, serviceComponentId) {
+    this._super(componentName, serviceComponentId);
+    this.showHideJournalNodesAddRemoveControl()
+  },
+
+  showHideJournalNodesAddRemoveControl: function() {
+    var masterComponents = this.get('selectedServicesMasters');
+    var jns = masterComponents.filterProperty('component_name', 'JOURNALNODE');
+    var maxNumMasters = this.getMaxNumberOfMasters('JOURNALNODE')
+    var showRemoveControl = jns.get('length') > this.get('JOURNALNODES_COUNT_MINIMUM');
+    var showAddControl = jns.get('length') < maxNumMasters;
+    jns.forEach(function(item) {
+      item.set('showAddControl', false);
+      item.set('showRemoveControl', showRemoveControl);
+    });
+    jns.set('lastObject.showAddControl', showAddControl);
+  }
 
 });
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/a442efbd/ambari-web/app/templates/main/admin/highAvailability/nameNode/step3.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/highAvailability/nameNode/step3.hbs b/ambari-web/app/templates/main/admin/highAvailability/nameNode/step3.hbs
index 27653d3..ef2621d 100644
--- a/ambari-web/app/templates/main/admin/highAvailability/nameNode/step3.hbs
+++ b/ambari-web/app/templates/main/admin/highAvailability/nameNode/step3.hbs
@@ -52,12 +52,10 @@
         </td>
         <td>
           <ul>
-            <li><span class="to-be-installed-green"><i
-                    class="icon-plus"></i>&nbsp;{{t admin.highAvailability.wizard.step3.toBeInstalled}}</span></li>
-            <li><span class="to-be-installed-green"><i
-                    class="icon-plus"></i>&nbsp;{{t admin.highAvailability.wizard.step3.toBeInstalled}}</span></li>
-            <li><span class="to-be-installed-green"><i
-                    class="icon-plus"></i>&nbsp;{{t admin.highAvailability.wizard.step3.toBeInstalled}}</span></li>
+              {{#each item in view.journalNodes}}
+                  <li><span class="to-be-installed-green"><i
+                          class="icon-plus"></i>&nbsp;{{t admin.highAvailability.wizard.step3.toBeInstalled}}</span></li>
+              {{/each}}
           </ul>
         </td>
       </tr>


[6/8] ambari git commit: AMBARI-18518 : Sinks should not try to read collector hosts from Zk if AMS is in embedded mode. (avijayan)

Posted by jo...@apache.org.
AMBARI-18518 : Sinks should not try to read collector hosts from Zk if AMS is in embedded mode. (avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/98efb571
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/98efb571
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/98efb571

Branch: refs/heads/branch-feature-AMBARI-18456
Commit: 98efb571d63a17973bb62a510593c2041c14dcf6
Parents: c10053b
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Tue Oct 4 11:23:32 2016 -0700
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Tue Oct 4 11:23:32 2016 -0700

----------------------------------------------------------------------
 .../availability/MetricCollectorHAHelper.java   | 10 +++++-
 .../availability/MetricCollectorHATest.java     | 34 +++++++++++++++++++-
 2 files changed, 42 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/98efb571/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHAHelper.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHAHelper.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHAHelper.java
index 2254362..e7f7cfd 100644
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHAHelper.java
+++ b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHAHelper.java
@@ -25,6 +25,7 @@ import org.apache.curator.RetryPolicy;
 import org.apache.curator.retry.BoundedExponentialBackoffRetry;
 import org.apache.curator.retry.RetryUntilElapsed;
 import org.apache.zookeeper.ZooKeeper;
+import org.apache.zookeeper.data.Stat;
 
 import java.util.Collection;
 import java.util.HashSet;
@@ -44,7 +45,8 @@ public class MetricCollectorHAHelper {
 
   private static final int CONNECTION_TIMEOUT = 2000;
   private static final int SESSION_TIMEOUT = 10000;
-  private static final String ZK_PATH = "/ambari-metrics-cluster/LIVEINSTANCES";
+  private static final String ZNODE = "/ambari-metrics-cluster";
+  private static final String ZK_PATH = ZNODE + "/LIVEINSTANCES";
   private static final String INSTANCE_NAME_DELIMITER = "_";
 
 
@@ -72,6 +74,12 @@ public class MetricCollectorHAHelper {
 
     try {
       client.start();
+      //Check if Znode exists
+      Stat stat = client.getZooKeeper().exists(ZNODE, false);
+      if (stat == null) {
+        LOG.info("/ambari-metrics-cluster znode does not exist. Skipping requesting live instances from zookeeper");
+        return collectors;
+      }
       liveInstances = RetryLoop.callWithRetry(client, new Callable<List<String>>() {
         @Override
         public List<String> call() throws Exception {

http://git-wip-us.apache.org/repos/asf/ambari/blob/98efb571/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHATest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHATest.java b/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHATest.java
index ac1f52d..3d00270 100644
--- a/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHATest.java
+++ b/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/availability/MetricCollectorHATest.java
@@ -20,9 +20,13 @@ package org.apache.hadoop.metrics2.sink.timeline.availability;
 import com.google.gson.Gson;
 import junit.framework.Assert;
 import org.apache.commons.io.IOUtils;
+import org.apache.curator.CuratorZookeeperClient;
+import org.apache.curator.retry.BoundedExponentialBackoffRetry;
 import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink;
+import org.apache.zookeeper.ZooKeeper;
 import org.junit.Test;
 import org.junit.runner.RunWith;
+import org.powermock.api.easymock.PowerMock;
 import org.powermock.core.classloader.annotations.PrepareForTest;
 import org.powermock.modules.junit4.PowerMockRunner;
 import java.io.IOException;
@@ -34,13 +38,14 @@ import java.util.Arrays;
 import java.util.Collection;
 
 import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
 import static org.powermock.api.easymock.PowerMock.createNiceMock;
 import static org.powermock.api.easymock.PowerMock.expectNew;
 import static org.powermock.api.easymock.PowerMock.replayAll;
 import static org.powermock.api.easymock.PowerMock.verifyAll;
 
 @RunWith(PowerMockRunner.class)
-@PrepareForTest({AbstractTimelineMetricsSink.class, URL.class, HttpURLConnection.class})
+@PrepareForTest({AbstractTimelineMetricsSink.class, URL.class, HttpURLConnection.class, MetricCollectorHAHelper.class})
 public class MetricCollectorHATest {
 
   @Test
@@ -71,6 +76,33 @@ public class MetricCollectorHATest {
 
   }
 
+
+  @Test
+  public void testEmbeddedModeCollectorZK() throws Exception {
+
+
+    BoundedExponentialBackoffRetry retryPolicyMock = PowerMock.createMock(BoundedExponentialBackoffRetry.class);
+    expectNew(BoundedExponentialBackoffRetry.class, 1000, 10000, 1).andReturn(retryPolicyMock);
+
+    CuratorZookeeperClient clientMock = PowerMock.createMock(CuratorZookeeperClient.class);
+    expectNew(CuratorZookeeperClient.class, "zkQ", 10000, 2000, null, retryPolicyMock)
+      .andReturn(clientMock);
+
+    clientMock.start();
+    expectLastCall().once();
+
+    ZooKeeper zkMock = PowerMock.createMock(ZooKeeper.class);
+    expect(clientMock.getZooKeeper()).andReturn(zkMock).once();
+
+    expect(zkMock.exists("/ambari-metrics-cluster", false)).andReturn(null).once();
+
+    replayAll();
+    MetricCollectorHAHelper metricCollectorHAHelper = new MetricCollectorHAHelper("zkQ", 1, 1000);
+    Collection<String> liveInstances = metricCollectorHAHelper.findLiveCollectorHostsFromZNode();
+    verifyAll();
+    Assert.assertTrue(liveInstances.isEmpty());
+  }
+
   @Test
   public void findCollectorUsingKnownCollectorTest() throws Exception {
     HttpURLConnection connection = createNiceMock(HttpURLConnection.class);


[8/8] ambari git commit: Merge branch 'trunk' into branch-feature-AMBARI-18456

Posted by jo...@apache.org.
Merge branch 'trunk' into branch-feature-AMBARI-18456


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5467ad07
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5467ad07
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5467ad07

Branch: refs/heads/branch-feature-AMBARI-18456
Commit: 5467ad0737bdadf23e0cec752acdbdc1b250de3c
Parents: 23fbfe4 a442efb
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Tue Oct 4 19:59:16 2016 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Tue Oct 4 19:59:16 2016 -0400

----------------------------------------------------------------------
 .../timeline/AbstractTimelineMetricsSink.java   |  75 +++---
 .../availability/MetricCollectorHAHelper.java   |  19 +-
 .../availability/MetricCollectorHATest.java     |  34 ++-
 .../src/main/python/core/blacklisted_set.py     |  14 ++
 .../src/main/python/core/config_reader.py       |   3 +-
 .../src/main/python/core/emitter.py             |  74 ++++--
 .../server/controller/KerberosHelperImpl.java   | 243 ++++++++++++++-----
 .../ConfigureAmbariIdentitiesServerAction.java  | 190 +++++++++++----
 .../kerberos/FinalizeKerberosServerAction.java  | 118 ++++++++-
 .../kerberos/KerberosOperationHandler.java      |   2 +-
 .../server/upgrade/UpgradeCatalog250.java       |  30 ++-
 .../ambari/server/utils/ShellCommandUtil.java   | 121 +++++++++
 .../resources/stacks/HDP/2.0.6/kerberos.json    |  14 +-
 .../server/controller/KerberosHelperTest.java   | 193 ++++++++++++---
 ...nfigureAmbariIdentitiesServerActionTest.java | 204 ++++++++++++++++
 .../server/upgrade/UpgradeCatalog250Test.java   |  61 +++++
 ambari-web/app/assets/test/tests.js             |   1 +
 .../nameNode/step2_controller.js                |  32 ++-
 .../main/admin/stack_and_upgrade_controller.js  |  45 +++-
 .../app/mappers/repository_version_mapper.js    |  28 ++-
 ambari-web/app/messages.js                      |   1 +
 .../models/stack_version/repository_version.js  |   6 +
 .../admin/highAvailability/nameNode/step3.hbs   |  10 +-
 .../admin/stack_upgrade/upgrade_version_box.hbs |   2 +-
 .../stack_upgrade/upgrade_version_column.hbs    |   2 +-
 ambari-web/app/utils/ajax/ajax.js               |   5 +
 .../stack_upgrade/upgrade_version_box_view.js   |   6 +
 .../upgrade_version_column_view.js              |   6 +-
 .../admin/stack_and_upgrade_controller_test.js  |  74 +++++-
 .../mappers/repository_version_mapper_test.js   |  46 ++++
 .../upgrade_version_box_view_test.js            |  23 +-
 31 files changed, 1447 insertions(+), 235 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/5467ad07/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
----------------------------------------------------------------------
diff --cc ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
index 6ac607d,b2eb738..fd5afcd
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
@@@ -18,39 -18,10 +18,42 @@@
  
  package org.apache.ambari.server.controller;
  
 -import com.google.inject.AbstractModule;
 -import com.google.inject.Guice;
 -import com.google.inject.Injector;
 -import junit.framework.Assert;
 +import static org.easymock.EasyMock.anyLong;
 +import static org.easymock.EasyMock.anyObject;
++import static org.easymock.EasyMock.anyString;
 +import static org.easymock.EasyMock.capture;
 +import static org.easymock.EasyMock.eq;
 +import static org.easymock.EasyMock.expect;
 +import static org.easymock.EasyMock.expectLastCall;
 +import static org.easymock.EasyMock.getCurrentArguments;
 +import static org.easymock.EasyMock.isNull;
 +import static org.easymock.EasyMock.newCapture;
 +import static org.easymock.EasyMock.replay;
 +import static org.easymock.EasyMock.reset;
 +import static org.easymock.EasyMock.verify;
 +import static org.junit.Assert.assertEquals;
 +import static org.junit.Assert.assertFalse;
 +import static org.junit.Assert.assertNotNull;
 +import static org.junit.Assert.assertTrue;
 +
++import java.io.File;
++import java.lang.reflect.Method;
 +import java.net.InetAddress;
 +import java.text.SimpleDateFormat;
 +import java.util.ArrayList;
 +import java.util.Arrays;
 +import java.util.Collection;
 +import java.util.Collections;
 +import java.util.Date;
 +import java.util.HashMap;
 +import java.util.HashSet;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.Set;
 +import java.util.concurrent.TimeUnit;
 +
 +import javax.persistence.EntityManager;
 +
  import org.apache.ambari.server.AmbariException;
  import org.apache.ambari.server.Role;
  import org.apache.ambari.server.actionmanager.ActionManager;
@@@ -121,14 -96,45 +127,16 @@@ import org.easymock.CaptureType
  import org.easymock.EasyMockSupport;
  import org.easymock.IAnswer;
  import org.junit.After;
++import org.junit.Assert;
  import org.junit.Before;
+ import org.junit.Rule;
  import org.junit.Test;
+ import org.junit.rules.TemporaryFolder;
  
 -import javax.persistence.EntityManager;
 -import java.io.File;
 -import java.lang.reflect.Method;
 -import java.net.InetAddress;
 -import java.text.SimpleDateFormat;
 -import java.util.ArrayList;
 -import java.util.Arrays;
 -import java.util.Collection;
 -import java.util.Collections;
 -import java.util.Date;
 -import java.util.HashMap;
 -import java.util.HashSet;
 -import java.util.List;
 -import java.util.Map;
 -import java.util.Set;
 -import java.util.concurrent.TimeUnit;
 +import com.google.inject.AbstractModule;
 +import com.google.inject.Guice;
 +import com.google.inject.Injector;
  
- import junit.framework.Assert;
 -import static org.easymock.EasyMock.anyLong;
 -import static org.easymock.EasyMock.anyObject;
 -import static org.easymock.EasyMock.anyString;
 -import static org.easymock.EasyMock.capture;
 -import static org.easymock.EasyMock.eq;
 -import static org.easymock.EasyMock.expect;
 -import static org.easymock.EasyMock.expectLastCall;
 -import static org.easymock.EasyMock.getCurrentArguments;
 -import static org.easymock.EasyMock.isNull;
 -import static org.easymock.EasyMock.newCapture;
 -import static org.easymock.EasyMock.replay;
 -import static org.easymock.EasyMock.reset;
 -import static org.easymock.EasyMock.verify;
 -import static org.junit.Assert.assertEquals;
 -import static org.junit.Assert.assertFalse;
 -import static org.junit.Assert.assertNotNull;
 -import static org.junit.Assert.assertTrue;
  
  @SuppressWarnings("unchecked")
  public class KerberosHelperTest extends EasyMockSupport {