Posted to commits@ambari.apache.org by sw...@apache.org on 2016/12/01 08:13:44 UTC

[01/50] ambari git commit: AMBARI-18974. A number of Ambari Server Unit Test failures on branch-2.5 (dlysnichenko)

Repository: ambari
Updated Branches:
  refs/heads/branch-feature-AMBARI-18901 b74905495 -> 333c70150


AMBARI-18974. A number of Ambari Server Unit Test failures on branch-2.5 (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/92823a75
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/92823a75
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/92823a75

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 92823a75d38cb5649ed6edddfbc29796f5853ab9
Parents: c538cd8
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Thu Nov 24 11:51:54 2016 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Thu Nov 24 11:52:32 2016 +0200

----------------------------------------------------------------------
 .../controller/internal/StackDefinedPropertyProviderTest.java      | 2 ++
 .../server/controller/metrics/RestMetricsPropertyProviderTest.java | 2 ++
 2 files changed, 4 insertions(+)
----------------------------------------------------------------------
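Both test classes receive the same two-line fix shown in the diffs below: an EasyMock expectation that makes the mocked AmbariManagementController return a nice mock of AmbariEventPublisher, so setup code that publishes events no longer fails. A minimal, standalone sketch of that nice-mock stubbing pattern (the AmbariManagementController import path and the wrapper class here are assumptions for illustration, not part of the patch):

    import static org.easymock.EasyMock.createNiceMock;
    import static org.easymock.EasyMock.expect;
    import static org.easymock.EasyMock.replay;

    import org.apache.ambari.server.controller.AmbariManagementController;
    import org.apache.ambari.server.events.publishers.AmbariEventPublisher;

    public class NiceMockStubSketch {
      public static AmbariManagementController stubbedController() {
        // Nice mock: any unstubbed call returns a default (null/0/false) instead of failing.
        AmbariManagementController amc = createNiceMock(AmbariManagementController.class);

        // The fix applied in both tests: getAmbariEventPublisher() hands back a harmless
        // nice mock for any number of calls, so code under test can publish events safely.
        expect(amc.getAmbariEventPublisher())
            .andReturn(createNiceMock(AmbariEventPublisher.class))
            .anyTimes();

        replay(amc);
        return amc;
      }
    }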


http://git-wip-us.apache.org/repos/asf/ambari/blob/92823a75/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackDefinedPropertyProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackDefinedPropertyProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackDefinedPropertyProviderTest.java
index 958957b..75d5bd8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackDefinedPropertyProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackDefinedPropertyProviderTest.java
@@ -52,6 +52,7 @@ import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.controller.spi.TemporalInfo;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.OrmTestHelper;
@@ -157,6 +158,7 @@ public class StackDefinedPropertyProviderTest {
     field.set(null, amc);
     Clusters clustersMock = createNiceMock(Clusters.class);
     Cluster clusterMock = createNiceMock(Cluster.class);
+    expect(amc.getAmbariEventPublisher()).andReturn(createNiceMock(AmbariEventPublisher.class)).anyTimes();
     expect(amc.getClusters()).andReturn(clustersMock).anyTimes();
     expect(clustersMock.getCluster(CLUSTER_NAME_PROPERTY_ID)).andReturn(clusterMock).anyTimes();
     expect(clusterMock.getResourceId()).andReturn(2L).anyTimes();

http://git-wip-us.apache.org/repos/asf/ambari/blob/92823a75/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/RestMetricsPropertyProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/RestMetricsPropertyProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/RestMetricsPropertyProviderTest.java
index a9934a2..410e2f1 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/RestMetricsPropertyProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/RestMetricsPropertyProviderTest.java
@@ -46,6 +46,7 @@ import org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.controller.spi.TemporalInfo;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.controller.utilities.StreamProvider;
+import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.security.TestAuthenticationFactory;
@@ -135,6 +136,7 @@ public class RestMetricsPropertyProviderTest {
 
     ConfigHelper configHelperMock = createNiceMock(ConfigHelper.class);
     expect(amc.getClusters()).andReturn(clusters).anyTimes();
+    expect(amc.getAmbariEventPublisher()).andReturn(createNiceMock(AmbariEventPublisher.class)).anyTimes();
     expect(amc.findConfigurationTagsWithOverrides(eq(c1), anyString())).andReturn(Collections.singletonMap("storm-site",
         Collections.singletonMap("tag", "version1"))).anyTimes();
     expect(amc.getConfigHelper()).andReturn(configHelperMock).anyTimes();


[29/50] ambari git commit: Revert "AMBARI-18912. Implement Create Alerts: step 1 select alert type.(XIWANG)"

Posted by sw...@apache.org.
Revert "AMBARI-18912. Implement Create Alerts: step 1 select alert type.(XIWANG)"

This reverts commit 86ecd898e7045c67cee85ca25c6c746298bd5ef8.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/be8354a5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/be8354a5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/be8354a5

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: be8354a5a987e9e92ca79cffb72496fc9a34fb4f
Parents: 2073588
Author: Xi Wang <xi...@apache.org>
Authored: Tue Nov 29 17:07:33 2016 -0800
Committer: Xi Wang <xi...@apache.org>
Committed: Tue Nov 29 17:09:19 2016 -0800

----------------------------------------------------------------------
 .../add_alert_definition/step1_controller.js    | 41 +++++++-----
 .../alerts/definition_configs_controller.js     | 40 ++++++------
 ambari-web/app/messages.js                      | 17 ++---
 .../app/models/alerts/alert_definition.js       | 54 ----------------
 ambari-web/app/styles/alerts.less               | 66 +-------------------
 .../main/alerts/add_alert_definition/step1.hbs  | 29 +++------
 .../alerts/add_alert_definition/step1_view.js   |  3 -
 .../step1_controller_test.js                    | 24 ++++++-
 8 files changed, 80 insertions(+), 194 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/be8354a5/ambari-web/app/controllers/main/alerts/add_alert_definition/step1_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/alerts/add_alert_definition/step1_controller.js b/ambari-web/app/controllers/main/alerts/add_alert_definition/step1_controller.js
index c758fc0..43e25c8 100644
--- a/ambari-web/app/controllers/main/alerts/add_alert_definition/step1_controller.js
+++ b/ambari-web/app/controllers/main/alerts/add_alert_definition/step1_controller.js
@@ -24,25 +24,34 @@ App.AddAlertDefinitionStep1Controller = Em.Controller.extend({
 
   /**
    * List of available alert definition types
-   * @type {{name: string, isActive: boolean}[]}
+   * @type {{value: string, isActive: boolean}[]}
    */
-  alertDefinitionsTypes: function () {
-    return App.AlertType.find().map(function(option) {
-      return Em.Object.create({
-        name: option.get('name'),
-        displayName: option.get('displayName'),
-        icon: option.get('iconPath'),
-        description: option.get('description')
-      });
-    });
-  }.property(),
+  alertDefinitionsTypes: [
+    Em.Object.create({value: 'PORT', isActive: false, icon: 'icon-signal'}),
+    Em.Object.create({value: 'METRIC', isActive: false, icon: 'icon-bolt'}),
+    Em.Object.create({value: 'WEB', isActive: false, icon: 'icon-globe'}),
+    Em.Object.create({value: 'AGGREGATE', isActive: false, icon: 'icon-plus-sign-alt'}),
+    Em.Object.create({value: 'SCRIPT', isActive: false, icon: 'icon-code'}),
+    Em.Object.create({value: 'SERVER', isActive: false, icon: 'icon-desktop'}),
+    Em.Object.create({value: 'RECOVERY', isActive: false, icon: 'icon-desktop'})
+  ],
+
+  /**
+   * "Next"-button is disabled if user doesn't select any alert definition type
+   * @type {boolean}
+   */
+  isSubmitDisabled: Em.computed.everyBy('alertDefinitionsTypes', 'isActive', false),
 
   /**
    * Set selectedType if it exists in the wizard controller
    * @method loadStep
    */
   loadStep: function() {
-    this.set('content.selectedType', '');
+    this.get('alertDefinitionsTypes').setEach('isActive', false);
+    var selectedType = this.get('content.selectedType');
+    if(selectedType) {
+      this.selectType({context: {value: selectedType}});
+    }
   },
 
   /**
@@ -52,10 +61,10 @@ App.AddAlertDefinitionStep1Controller = Em.Controller.extend({
    */
   selectType: function(e) {
     var type = e.context,
-        types = this.get('alertDefinitionsTypes');
-    this.set('content.selectedType', type.name);
-    $("[rel='selectable-tooltip']").trigger('mouseleave');
-    App.router.send('next');
+      types = this.get('alertDefinitionsTypes');
+    types.setEach('isActive', false);
+    types.findProperty('value', type.value).set('isActive', true);
+    this.set('content.selectedType', type.value);
   }
 
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/be8354a5/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/alerts/definition_configs_controller.js b/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
index 138aaa4..4c82556 100644
--- a/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
+++ b/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
@@ -225,7 +225,7 @@ App.MainAlertDefinitionConfigsController = Em.Controller.extend({
     var result = [];
     var alertDefinition = this.get('content');
     var isWizard = this.get('isWizard');
-    var units = this.get('content.reporting') && this.get('content.reporting').findProperty('type','units') ?
+    var units = this.get('content.reporting').findProperty('type','units') ?
       this.get('content.reporting').findProperty('type','units').get('text'): null;
 
     if (this.get('isWizard')) {
@@ -256,7 +256,7 @@ App.MainAlertDefinitionConfigsController = Em.Controller.extend({
         value: isWizard ? '' : this.getThresholdsProperty('critical', 'value')
       }),
       App.AlertConfigProperties.Parameter.create({
-        value: isWizard ? '': alertDefinition.get('uri.connectionTimeout'),
+        value: alertDefinition.get('uri.connectionTimeout'),
         threshold: "CRITICAL",
         name: 'connection_timeout',
         label: 'Connection Timeout',
@@ -311,7 +311,7 @@ App.MainAlertDefinitionConfigsController = Em.Controller.extend({
         value: isWizard ? '' : this.getThresholdsProperty('critical', 'value')
       }),
       App.AlertConfigProperties.Parameter.create({
-        value: isWizard ? '': alertDefinition.get('uri.connectionTimeout'),
+        value: alertDefinition.get('uri.connectionTimeout'),
         threshold: "CRITICAL",
         name: 'connection_timeout',
         label: 'Connection Timeout',
@@ -404,7 +404,7 @@ App.MainAlertDefinitionConfigsController = Em.Controller.extend({
         valueMetric: units
       }),
       App.AlertConfigProperties.Parameter.create({
-        value: isWizard ? '': alertDefinition.get('uri.connectionTimeout'),
+        value: alertDefinition.get('uri.connectionTimeout'),
         name: 'connection_timeout',
         label: 'Connection Timeout',
         displayType: 'parameter',
@@ -447,22 +447,20 @@ App.MainAlertDefinitionConfigsController = Em.Controller.extend({
       NUMERIC: App.AlertConfigProperties.Parameters.NumericMixin,
       PERCENT: App.AlertConfigProperties.Parameters.PercentageMixin
     };
-    if (alertDefinition) {
-      alertDefinition.get('parameters').forEach(function (parameter) {
-        var mixin = mixins[parameter.get('type')] || {}; // validation depends on parameter-type
-        result.push(App.AlertConfigProperties.Parameter.create(mixin, {
-          value: isWizard ? '' : parameter.get('value'),
-          apiProperty: parameter.get('name'),
-          description: parameter.get('description'),
-          label: isWizard ? '' : parameter.get('displayName'),
-          threshold: isWizard ? '' : parameter.get('threshold'),
-          units: isWizard ? '' : parameter.get('units'),
-          type: isWizard ? '' : parameter.get('type'),
-          hidden: parameter.get('visibility') === "HIDDEN",
-          readonly: parameter.get('visibility') === "READ_ONLY"
-        }));
-      });
-    }
+    alertDefinition.get('parameters').forEach(function (parameter) {
+      var mixin = mixins[parameter.get('type')] || {}; // validation depends on parameter-type
+      result.push(App.AlertConfigProperties.Parameter.create(mixin, {
+        value: isWizard ? '' : parameter.get('value'),
+        apiProperty: parameter.get('name'),
+        description: parameter.get('description'),
+        label: isWizard ? '' : parameter.get('displayName'),
+        threshold: isWizard ? '' : parameter.get('threshold'),
+        units: isWizard ? '' : parameter.get('units'),
+        type: isWizard ? '' : parameter.get('type'),
+        hidden: parameter.get('visibility') === "HIDDEN",
+        readonly: parameter.get('visibility') === "READ_ONLY"
+      }));
+    });
 
     return result;
   },
@@ -475,7 +473,7 @@ App.MainAlertDefinitionConfigsController = Em.Controller.extend({
   renderAggregateConfigs: function () {
     var isWizard = this.get('isWizard');
     var alertDefinition = this.get('content');
-    var units = this.get('content.reporting') && this.get('content.reporting').findProperty('type','units') ?
+    var units = this.get('content.reporting').findProperty('type','units') ?
         this.get('content.reporting').findProperty('type','units').get('text'): null;
     return [
       App.AlertConfigProperties.Description.create({

http://git-wip-us.apache.org/repos/asf/ambari/blob/be8354a5/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index c57be5e..e8517b2 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -1045,21 +1045,12 @@ Em.I18n.translations = {
   'form.validator.alertNotificationName':'Invalid Alert Notification Name. Only alphanumerics, hyphens, spaces and underscores are allowed.',
   'form.validator.configKey.specific':'"{0}" is invalid Key. Only alphanumerics, hyphens, underscores, asterisks and periods are allowed.',
 
-  'alerts.add.header': 'Create Alert',
-  'alerts.add.step1.header': 'Choose Alert Type',
-  'alerts.add.step1.header.description': 'Select the type of alert you want to create',
-  'alerts.add.step2.header': 'Define Alert',
-  'alerts.add.step3.header': 'Specify Threshold',
+  'alerts.add.header': 'Create Alert Definition',
+  'alerts.add.step1.header': 'Choose Type',
+  'alerts.add.step2.header': 'Configure',
+  'alerts.add.step3.header': 'Review',
   'alerts.add.step3.selectedType': 'Selected Type',
 
-  'alerts.add.wizard.step1.body.choose.tooltip': 'Click to select',
-  'alerts.add.wizard.step1.body.port.description':'Check TCP connectivity to a remote endpoint',
-  'alerts.add.wizard.step1.body.web.description':'Check for TCP connectivity and verify that a proper HTTP response code was returned',
-  'alerts.add.wizard.step1.body.metric.description':'Define JMX/AMS endpoints that can be queried for values',
-  'alerts.add.wizard.step1.body.script.description':'Defer all functionality to a Python script accessible to the Ambari agents from a specified relative or absolute path',
-  'alerts.add.wizard.step1.body.aggregate.description':'Combine the results of another alert definition from different nodes',
-  'alerts.add.wizard.step1.body.raw.description':'User can upload alert parameter using a JSON script',
-
   'alerts.fastAccess.popup.header': '{0} Critical or Warning Alerts',
   'alerts.fastAccess.popup.body.name': 'Alert Definition Name',
   'alerts.fastAccess.popup.body.showmore': 'Go to Alerts Definitions',

http://git-wip-us.apache.org/repos/asf/ambari/blob/be8354a5/ambari-web/app/models/alerts/alert_definition.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/alerts/alert_definition.js b/ambari-web/app/models/alerts/alert_definition.js
index c369caa..4da7199 100644
--- a/ambari-web/app/models/alerts/alert_definition.js
+++ b/ambari-web/app/models/alerts/alert_definition.js
@@ -325,57 +325,3 @@ App.AlertMetricsSourceDefinition.FIXTURES = [];
 App.AlertMetricsUriDefinition.FIXTURES = [];
 App.AlertMetricsAmsDefinition.FIXTURES = [];
 App.AlertDefinitionParameter.FIXTURES = [];
-
-
-App.AlertType = DS.Model.extend({
-  name: DS.attr('string'),
-  displayName: DS.attr('string'),
-  iconPath: DS.attr('string'),
-  description: DS.attr('string'),
-  properties: DS.attr('array')
-});
-
-App.AlertType.FIXTURES = [
-  {
-    id: 'PORT',
-    name: 'PORT',
-    icon_path: 'icon-signin',
-    display_name: 'Port',
-    description: Em.I18n.t('alerts.add.wizard.step1.body.port.description')
-  },
-  {
-    id: 'WEB',
-    name: 'WEB',
-    icon_path: 'icon-globe',
-    display_name: 'Web',
-    description: Em.I18n.t('alerts.add.wizard.step1.body.web.description')
-  },
-  {
-    id: 'METRIC',
-    name: 'METRIC',
-    display_name: 'Metric',
-    icon_path: 'icon-bolt',
-    description: Em.I18n.t('alerts.add.wizard.step1.body.metric.description')
-  },
-  {
-    id: 'SCRIPT',
-    name: 'SCRIPT',
-    icon_path: 'icon-file-text',
-    display_name: 'Script',
-    description: Em.I18n.t('alerts.add.wizard.step1.body.script.description')
-  },
-  {
-    id: 'AGGREGATE',
-    name: 'AGGREGATE',
-    icon_path: 'icon-plus',
-    display_name: 'Aggregate',
-    description: Em.I18n.t('alerts.add.wizard.step1.body.aggregate.description')
-  },
-  {
-    id: 'RAW',
-    name: 'RAW',
-    icon_path: 'icon-align-justify',
-    display_name: 'Raw',
-    description: Em.I18n.t('alerts.add.wizard.step1.body.raw.description')
-  }
-];

http://git-wip-us.apache.org/repos/asf/ambari/blob/be8354a5/ambari-web/app/styles/alerts.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/alerts.less b/ambari-web/app/styles/alerts.less
index c9ae361..e31ea30 100644
--- a/ambari-web/app/styles/alerts.less
+++ b/ambari-web/app/styles/alerts.less
@@ -668,68 +668,4 @@
 .alert-definition-filter-list {
   max-height: 200px;
   overflow-y: scroll;
-}
-
-/***** Start styles for alert create wizard *****/
-#create-alert-wizard-step1 {
-  .alert-types-container {
-    .alert-type {
-      height: 155px;
-      width: 30.5%;
-      margin: 0px 8px 20px 8px;
-      padding: 10px;
-      background: white;
-      border: 1px solid #ccc;
-      -webkit-border-radius: 4px;
-      -moz-border-radius: 4px;
-      border-radius: 4px;
-    }
-    .alert-type:hover {
-      cursor: pointer;
-      border: 1px solid #eee;
-      background: #eee;
-    }
-    .icon {
-      span {
-        font-size: 35px;
-        line-height: 3.5;
-        margin-left: 5px;
-      }
-    }
-    .icon.PORT {
-      color: #F6D955;
-    }
-    .icon.WEB {
-      color: #9ADCD4;
-    }
-    .icon.METRIC {
-      color: #F69F79;
-    }
-    .icon.SCRIPT {
-      color: #F2C2AA;
-    }
-    .icon.AGGREGATE {
-      color: #8AD4C7;
-    }
-    .icon.RAW {
-      color: #C9A6B2;
-    }
-    .label-description {
-      padding-top: 10px;
-      .label-text {
-        font-size: 14px;
-        font-weight: bold;
-        margin-bottom: 5px;
-        white-space: nowrap;
-        overflow: hidden;
-        text-overflow: ellipsis;
-      }
-      .description-text {
-        font-size: 12px;
-        overflow: hidden;
-        text-overflow: ellipsis;
-      }
-    }
-  }
-}
-
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/be8354a5/ambari-web/app/templates/main/alerts/add_alert_definition/step1.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/alerts/add_alert_definition/step1.hbs b/ambari-web/app/templates/main/alerts/add_alert_definition/step1.hbs
index 52e8671..9b207cd 100644
--- a/ambari-web/app/templates/main/alerts/add_alert_definition/step1.hbs
+++ b/ambari-web/app/templates/main/alerts/add_alert_definition/step1.hbs
@@ -16,23 +16,14 @@
 * limitations under the License.
 }}
 
-<div id="create-alert-wizard-step1">
-  <h2>{{t alerts.add.step1.header}}</h2>
-  <div class="alert alert-info">{{t alerts.add.step1.header.description}}</div>
-  <div class="alert-types-container row-fluid">
-    {{#each type in controller.alertDefinitionsTypes}}
-      <div class="span4 alert-type" rel="selectable-tooltip"
-        {{translateAttr data-original-title="alerts.add.wizard.step1.body.choose.tooltip"}}
-        {{action "selectType" type target="controller"}}>
-        <div {{bindAttr class="type.name :icon :span3"}}>
-          <span {{bindAttr class="type.icon"}}></span>
-        </div>
-        <div class="label-description span9">
-          <p class="label-text">{{type.displayName}}</p>
-          <p class="description-text">{{type.description}}</p>
-        </div>
-      </div>
-    {{/each}}
-  </div>
-</div>
+<h2>{{t alerts.add.step1.header}}</h2>
+
+{{#each type in controller.alertDefinitionsTypes}}
+  <a href="#" {{bindAttr class=":btn type.isActive:active"}} {{action selectType type target="controller"}}>
+    <span {{bindAttr class="type.icon"}}></span> {{type.value}}
+  </a>
+{{/each}}
 
+<div class="btn-area">
+  <a class="btn btn-success pull-right" {{bindAttr disabled="isSubmitDisabled"}} {{action next}}>{{t common.next}} &rarr;</a>
+</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/be8354a5/ambari-web/app/views/main/alerts/add_alert_definition/step1_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/alerts/add_alert_definition/step1_view.js b/ambari-web/app/views/main/alerts/add_alert_definition/step1_view.js
index 398d721..d55d20f 100644
--- a/ambari-web/app/views/main/alerts/add_alert_definition/step1_view.js
+++ b/ambari-web/app/views/main/alerts/add_alert_definition/step1_view.js
@@ -24,9 +24,6 @@ App.AddAlertDefinitionStep1View = Em.View.extend({
 
   didInsertElement: function() {
     this.get('controller').loadStep();
-    Em.run.later(this, function () {
-      App.tooltip($("[rel='selectable-tooltip']"));
-    }, 300);
   }
 
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/be8354a5/ambari-web/test/controllers/main/alerts/add_alert_definition/step1_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/alerts/add_alert_definition/step1_controller_test.js b/ambari-web/test/controllers/main/alerts/add_alert_definition/step1_controller_test.js
index 7d19239..322c08a 100644
--- a/ambari-web/test/controllers/main/alerts/add_alert_definition/step1_controller_test.js
+++ b/ambari-web/test/controllers/main/alerts/add_alert_definition/step1_controller_test.js
@@ -30,13 +30,13 @@ describe('App.AddAlertDefinitionStep1Controller', function () {
   describe('#selectType', function() {
 
     beforeEach(function () {
-      controller.get('content').set('selectedType', '');
+      controller.get('alertDefinitionsTypes').setEach('isActive', false);
     });
 
     it('should set isActive for selected type', function () {
       var e = {context: {value: 'PORT'}};
       controller.selectType(e);
-      expect(controller.get('content.selectedType')).to.equal('PORT');
+      expect(controller.get('alertDefinitionsTypes').findProperty('value', 'PORT').get('isActive')).to.be.true;
     });
 
   });
@@ -45,12 +45,30 @@ describe('App.AddAlertDefinitionStep1Controller', function () {
 
     beforeEach(function () {
       controller.set('content.selectedType', 'PORT');
+
     });
 
     it('should set predefined type', function () {
       controller.loadStep();
-      expect(controller.get('content.selectedType').to.equal(''));
+      expect(controller.get('alertDefinitionsTypes').findProperty('value', 'PORT').get('isActive')).to.be.true;
+    });
+
+  });
+
+  describe('#isSubmitDisabled', function () {
+
+    beforeEach(function () {
+      controller.get('alertDefinitionsTypes').setEach('isActive', false);
+    });
+
+    it('should be based on isActive', function () {
+
+      expect(controller.get('isSubmitDisabled')).to.be.true;
+      controller.get('alertDefinitionsTypes').objectAt(0).set('isActive', true);
+      expect(controller.get('isSubmitDisabled')).to.be.false;
+
     });
 
   });
+
 });
\ No newline at end of file


[11/50] ambari git commit: AMBARI-18976. Config History request execution time depends on config versions count. (mpapirkovskyy)

Posted by sw...@apache.org.
AMBARI-18976. Config History request execution time depends on config versions count. (mpapirkovskyy)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/911b9177
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/911b9177
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/911b9177

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 911b917783d66a44958e791e49d76b755d943f85
Parents: 568d1e6
Author: Myroslav Papirkovskyi <mp...@hortonworks.com>
Authored: Wed Nov 23 19:03:14 2016 +0200
Committer: Myroslav Papirkovskyi <mp...@hortonworks.com>
Committed: Mon Nov 28 18:42:05 2016 +0200

----------------------------------------------------------------------
 .../internal/ClusterControllerImpl.java         |  2 +-
 .../ServiceConfigVersionResourceProvider.java   |  3 +-
 .../logging/LoggingSearchPropertyProvider.java  | 11 ++++-
 .../ambari/server/orm/dao/ClusterDAO.java       | 24 +++++++++++
 .../ambari/server/orm/dao/ServiceConfigDAO.java | 13 +++---
 .../orm/entities/ClusterConfigEntity.java       |  7 +++-
 .../entities/ClusterConfigMappingEntity.java    |  6 +++
 .../orm/entities/ServiceConfigEntity.java       |  5 ++-
 .../server/state/cluster/ClusterImpl.java       | 42 +++++---------------
 9 files changed, 67 insertions(+), 46 deletions(-)
----------------------------------------------------------------------
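The LoggingSearchPropertyProvider hunk below is the core of the speed-up: the per-cluster "is the Log Search server running?" check is evaluated once per request and cached in a map, instead of once per resource. A small self-contained sketch of that memoize-inside-the-loop pattern (the interface and method names here are invented for illustration):

    import java.util.HashMap;
    import java.util.Map;

    public class PerRequestCacheSketch {

      /** Stands in for an expensive check, e.g. asking whether the Log Search server is up. */
      interface ClusterCheck {
        boolean isRunning(String clusterName);
      }

      /**
       * Evaluate the check at most once per cluster for a batch of resources,
       * mirroring the isLogSearchRunning map added to populateResources().
       */
      static int countRunning(Iterable<String> clusterNamePerResource, ClusterCheck check) {
        Map<String, Boolean> cache = new HashMap<>();
        int running = 0;
        for (String clusterName : clusterNamePerResource) {
          Boolean isUp = cache.get(clusterName);
          if (isUp == null) {              // first resource from this cluster: do the real check
            isUp = check.isRunning(clusterName);
            cache.put(clusterName, isUp);  // later resources from the same cluster reuse the answer
          }
          if (isUp) {
            running++;
          }
        }
        return running;
      }
    }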


http://git-wip-us.apache.org/repos/asf/ambari/blob/911b9177/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
index 32bed7b..c752e80 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
@@ -858,7 +858,7 @@ public class ClusterControllerImpl implements ClusterController {
       if (compVal == 0) {
         Schema schema = getSchema(resourceType);
 
-        for (Type type : Type.values()) {
+        for (Type type : schema.getKeyTypes()) {
           String keyPropertyId = schema.getKeyPropertyId(type);
           if (keyPropertyId != null) {
             compVal = compareValues(resource1.getPropertyValue(keyPropertyId),

http://git-wip-us.apache.org/repos/asf/ambari/blob/911b9177/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java
index 2edbe9b..e5ca389 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java
@@ -91,8 +91,9 @@ public class ServiceConfigVersionResourceProvider extends
     PROPERTY_IDS.add(SERVICE_CONFIG_VERSION_IS_COMPATIBLE_PROPERTY_ID);
 
     // keys
-    KEY_PROPERTY_IDS.put(Resource.Type.ServiceConfigVersion,SERVICE_CONFIG_VERSION_SERVICE_NAME_PROPERTY_ID);
+    KEY_PROPERTY_IDS.put(Resource.Type.Service,SERVICE_CONFIG_VERSION_SERVICE_NAME_PROPERTY_ID);
     KEY_PROPERTY_IDS.put(Resource.Type.Cluster,SERVICE_CONFIG_VERSION_CLUSTER_NAME_PROPERTY_ID);
+    KEY_PROPERTY_IDS.put(Resource.Type.ServiceConfigVersion,SERVICE_CONFIG_VERSION_PROPERTY_ID);
   }
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/911b9177/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingSearchPropertyProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingSearchPropertyProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingSearchPropertyProvider.java
index d9db290..16788ed 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingSearchPropertyProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingSearchPropertyProvider.java
@@ -39,8 +39,10 @@ import org.apache.log4j.Logger;
 
 import java.util.Collections;
 import java.util.EnumSet;
+import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -73,7 +75,7 @@ public class LoggingSearchPropertyProvider implements PropertyProvider {
 
   @Override
   public Set<Resource> populateResources(Set<Resource> resources, Request request, Predicate predicate) throws SystemException {
-
+    Map<String, Boolean> isLogSearchRunning = new HashMap<>();
     for (Resource resource : resources) {
       // obtain the required identifying properties on the host component resource
       final String componentName = (String)resource.getPropertyValue(PropertyHelper.getPropertyId("HostRoles", "component_name"));
@@ -90,7 +92,12 @@ public class LoggingSearchPropertyProvider implements PropertyProvider {
         continue;
       }
 
-      if (!logSearchServerRunning(clusterName)) {
+      Boolean isLogSearchRunningForSpecifiedCluster = isLogSearchRunning.get(clusterName);
+      if (isLogSearchRunningForSpecifiedCluster == null) {
+        isLogSearchRunningForSpecifiedCluster = logSearchServerRunning(clusterName);
+        isLogSearchRunning.put(clusterName, isLogSearchRunningForSpecifiedCluster);
+      }
+      if (!isLogSearchRunningForSpecifiedCluster) {
         continue;
       }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/911b9177/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java
index e93ac0e..b727c72 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java
@@ -116,6 +116,18 @@ public class ClusterDAO {
   }
 
   @RequiresSession
+  public List<ClusterConfigEntity> getLatestClusterConfigsByTypes(Long clusterId, List<String> types) {
+    TypedQuery<ClusterConfigEntity> query = entityManagerProvider.get().createNamedQuery(
+      "ClusterConfigEntity.findLatestClusterConfigsByTypes",
+      ClusterConfigEntity.class);
+
+    query.setParameter("clusterId", clusterId);
+    query.setParameter("types", types);
+
+    return daoUtils.selectList(query);
+  }
+
+  @RequiresSession
   public ClusterConfigEntity findConfig(Long clusterId, String type, Long version) {
     CriteriaBuilder cb = entityManagerProvider.get().getCriteriaBuilder();
     CriteriaQuery<ClusterConfigEntity> cq = cb.createQuery(ClusterConfigEntity.class);
@@ -240,6 +252,18 @@ public class ClusterDAO {
     return daoUtils.selectList(query);
   }
 
+  @RequiresSession
+  public List<ClusterConfigMappingEntity> getLatestClusterConfigMappingsEntityByType(long clusterId, String configType) {
+    TypedQuery<ClusterConfigMappingEntity> query = entityManagerProvider.get().createNamedQuery(
+      "ClusterConfigMappingEntity.findLatestClusterConfigMappingsByType",
+      ClusterConfigMappingEntity.class);
+
+    query.setParameter("clusterId", clusterId);
+    query.setParameter("typeName", configType);
+
+    return daoUtils.selectList(query);
+  }
+
   /**
    * Gets selected mappings for provided config types
    * @param clusterId cluster id

http://git-wip-us.apache.org/repos/asf/ambari/blob/911b9177/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
index 2cbee8d..212a9f0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
@@ -115,14 +115,13 @@ public class ServiceConfigDAO {
 
   @RequiresSession
   public List<ServiceConfigEntity> getLastServiceConfigs(Long clusterId) {
-    TypedQuery<ServiceConfigEntity> query = entityManagerProvider.get().
-      createQuery("SELECT scv FROM ServiceConfigEntity scv " +
-        "WHERE scv.clusterId = ?1 AND scv.createTimestamp = (" +
-        "SELECT MAX(scv2.createTimestamp) FROM ServiceConfigEntity scv2 " +
-        "WHERE scv2.serviceName = scv.serviceName AND scv2.clusterId = ?1 AND scv2.groupId IS NULL)",
-        ServiceConfigEntity.class);
+    TypedQuery<ServiceConfigEntity> query = entityManagerProvider.get().createNamedQuery(
+      "ServiceConfigEntity.findLatestServiceConfigsByCluster",
+      ServiceConfigEntity.class);
 
-    return daoUtils.selectList(query, clusterId);
+    query.setParameter("clusterId", clusterId);
+
+    return daoUtils.selectList(query);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/911b9177/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigEntity.java
index bf0faee..937e872 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigEntity.java
@@ -55,7 +55,12 @@ import javax.persistence.UniqueConstraint;
     @NamedQuery(name = "ClusterConfigEntity.findClusterConfigMappingsByStack",
       query = "SELECT mapping FROM ClusterConfigMappingEntity mapping " +
         "JOIN ClusterConfigEntity config ON mapping.typeName = config.type AND mapping.tag = config.tag " +
-        "WHERE mapping.clusterId = :clusterId AND config.stack = :stack")
+        "WHERE mapping.clusterId = :clusterId AND config.stack = :stack"),
+    @NamedQuery(name = "ClusterConfigEntity.findLatestClusterConfigsByTypes",
+      query = "SELECT cc FROM ClusterConfigEntity cc " +
+        "JOIN ClusterConfigMappingEntity ccm " +
+        "ON cc.clusterId = ccm.clusterId AND cc.type = ccm.typeName AND cc.tag = ccm.tag " +
+        "WHERE cc.clusterId = :clusterId AND ccm.selectedInd > 0 AND ccm.typeName IN :types")
 })
 
 public class ClusterConfigEntity {
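
Together with the new ClusterDAO methods above, this annotation follows the standard JPA named-query pattern: the JPQL is declared once on the entity, and the DAO only looks the query up by name and binds parameters. A condensed sketch of that pattern under the same javax.persistence API (the entity and DAO names here are invented for illustration, not part of the patch):

    import java.util.List;
    import javax.persistence.Entity;
    import javax.persistence.EntityManager;
    import javax.persistence.Id;
    import javax.persistence.NamedQuery;
    import javax.persistence.TypedQuery;

    // The JPQL lives on the entity as a named query; fields are trimmed to what the query uses.
    @Entity
    @NamedQuery(name = "ConfigSketch.findByTypes",
        query = "SELECT c FROM ConfigSketch c WHERE c.clusterId = :clusterId AND c.type IN :types")
    class ConfigSketch {
      @Id Long id;
      Long clusterId;
      String type;
    }

    class ConfigSketchDao {
      private final EntityManager em;

      ConfigSketchDao(EntityManager em) {
        this.em = em;
      }

      List<ConfigSketch> findByTypes(Long clusterId, List<String> types) {
        TypedQuery<ConfigSketch> query =
            em.createNamedQuery("ConfigSketch.findByTypes", ConfigSketch.class);
        query.setParameter("clusterId", clusterId);
        query.setParameter("types", types);   // collection parameter bound to the IN clause
        return query.getResultList();
      }
    }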

http://git-wip-us.apache.org/repos/asf/ambari/blob/911b9177/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigMappingEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigMappingEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigMappingEntity.java
index c3c3e9e..04c6030 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigMappingEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigMappingEntity.java
@@ -23,6 +23,8 @@ import javax.persistence.Id;
 import javax.persistence.IdClass;
 import javax.persistence.JoinColumn;
 import javax.persistence.ManyToOne;
+import javax.persistence.NamedQueries;
+import javax.persistence.NamedQuery;
 import javax.persistence.Table;
 
 /**
@@ -31,6 +33,10 @@ import javax.persistence.Table;
 @Table(name = "clusterconfigmapping")
 @Entity
 @IdClass(ClusterConfigMappingEntityPK.class)
+@NamedQueries({
+  @NamedQuery(name = "ClusterConfigMappingEntity.findLatestClusterConfigMappingsByType",
+    query = "SELECT mapping FROM ClusterConfigMappingEntity mapping WHERE mapping.clusterId = :clusterId AND mapping.selectedInd > 0 AND mapping.typeName = :typeName")})
+
 public class ClusterConfigMappingEntity {
 
   @Id

http://git-wip-us.apache.org/repos/asf/ambari/blob/911b9177/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
index 7c28835..0df6f68 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
@@ -51,7 +51,8 @@ import javax.persistence.TableGenerator;
     @NamedQuery(name = "ServiceConfigEntity.findNextServiceConfigVersion", query = "SELECT COALESCE(MAX(serviceConfig.version), 0) + 1 AS nextVersion FROM ServiceConfigEntity serviceConfig WHERE serviceConfig.serviceName=:serviceName AND serviceConfig.clusterId=:clusterId"),
     @NamedQuery(name = "ServiceConfigEntity.findAllServiceConfigsByStack", query = "SELECT serviceConfig FROM ServiceConfigEntity serviceConfig WHERE serviceConfig.clusterId=:clusterId AND serviceConfig.stack=:stack"),
     @NamedQuery(name = "ServiceConfigEntity.findLatestServiceConfigsByStack", query = "SELECT serviceConfig FROM ServiceConfigEntity serviceConfig WHERE serviceConfig.clusterId = :clusterId AND serviceConfig.version = (SELECT MAX(serviceConfig2.version) FROM ServiceConfigEntity serviceConfig2 WHERE serviceConfig2.clusterId=:clusterId AND serviceConfig2.stack=:stack AND serviceConfig2.serviceName = serviceConfig.serviceName)"),
-    @NamedQuery(name = "ServiceConfigEntity.findLatestServiceConfigsByService", query = "SELECT scv FROM ServiceConfigEntity scv WHERE scv.clusterId = :clusterId AND scv.serviceName = :serviceName AND scv.version = (SELECT MAX(scv2.version) FROM ServiceConfigEntity scv2 WHERE (scv2.serviceName = :serviceName AND scv2.clusterId = :clusterId) AND (scv2.groupId = scv.groupId OR (scv2.groupId IS NULL AND scv.groupId IS NULL)))")})
+    @NamedQuery(name = "ServiceConfigEntity.findLatestServiceConfigsByService", query = "SELECT scv FROM ServiceConfigEntity scv WHERE scv.clusterId = :clusterId AND scv.serviceName = :serviceName AND scv.version = (SELECT MAX(scv2.version) FROM ServiceConfigEntity scv2 WHERE (scv2.serviceName = :serviceName AND scv2.clusterId = :clusterId) AND (scv2.groupId = scv.groupId OR (scv2.groupId IS NULL AND scv.groupId IS NULL)))"),
+    @NamedQuery(name = "ServiceConfigEntity.findLatestServiceConfigsByCluster", query = "SELECT scv FROM ServiceConfigEntity scv WHERE scv.clusterId = :clusterId AND scv.serviceConfigId IN (SELECT MAX(scv1.serviceConfigId) FROM ServiceConfigEntity scv1 WHERE (scv1.clusterId = :clusterId) AND (scv1.groupId IS NULL) GROUP BY scv1.serviceName)")})
 public class ServiceConfigEntity {
   @Id
   @Column(name = "service_config_id")
@@ -263,4 +264,4 @@ public class ServiceConfigEntity {
     }
     return true;
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/911b9177/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 8b157c7..7bf24ce 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -2534,12 +2534,10 @@ public class ClusterImpl implements Cluster {
     if (serviceConfigEntity.getGroupId() == null) {
       Collection<String> configTypes = serviceConfigTypes.get(serviceName);
       List<ClusterConfigMappingEntity> mappingEntities =
-          clusterDAO.getClusterConfigMappingEntitiesByCluster(getClusterId());
+          clusterDAO.getSelectedConfigMappingByTypes(getClusterId(), new ArrayList<>(configTypes));
       for (ClusterConfigMappingEntity entity : mappingEntities) {
-        if (configTypes.contains(entity.getType()) && entity.isSelected() > 0) {
-          entity.setSelected(0);
-          entity = clusterDAO.mergeConfigMapping(entity);
-        }
+        entity.setSelected(0);
+        clusterDAO.mergeConfigMapping(entity);
       }
 
       for (ClusterConfigEntity configEntity : serviceConfigEntity.getClusterConfigEntities()) {
@@ -2599,14 +2597,12 @@ public class ClusterImpl implements Cluster {
   @Transactional
   void selectConfig(String type, String tag, String user) {
     Collection<ClusterConfigMappingEntity> entities =
-        clusterDAO.getClusterConfigMappingEntitiesByCluster(getClusterId());
+      clusterDAO.getLatestClusterConfigMappingsEntityByType(getClusterId(), type);
 
     //disable previous config
     for (ClusterConfigMappingEntity e : entities) {
-      if (e.isSelected() > 0 && e.getType().equals(type)) {
-        e.setSelected(0);
-        e = clusterDAO.mergeConfigMapping(e);
-      }
+      e.setSelected(0);
+      clusterDAO.mergeConfigMapping(e);
     }
 
     ClusterEntity clusterEntity = getClusterEntity();
@@ -2672,32 +2668,15 @@ public class ClusterImpl implements Cluster {
   }
 
   private List<ClusterConfigEntity> getClusterConfigEntitiesByService(String serviceName) {
-    List<ClusterConfigEntity> configEntities = new ArrayList<ClusterConfigEntity>();
-
-    //add configs from this service
     Collection<String> configTypes = serviceConfigTypes.get(serviceName);
-    for (ClusterConfigMappingEntity mappingEntity : clusterDAO.getClusterConfigMappingEntitiesByCluster(getClusterId())) {
-      if (mappingEntity.isSelected() > 0 && configTypes.contains(mappingEntity.getType())) {
-        ClusterConfigEntity configEntity =
-          clusterDAO.findConfig(getClusterId(), mappingEntity.getType(), mappingEntity.getTag());
-        if (configEntity != null) {
-          configEntities.add(configEntity);
-        } else {
-          LOG.error("Desired cluster config type={}, tag={} is not present in database," +
-            " unable to add to service config version");
-        }
-      }
-    }
-    return configEntities;
+    return clusterDAO.getLatestClusterConfigsByTypes(getClusterId(), new ArrayList<>(configTypes));
   }
 
   @Override
   public Config getDesiredConfigByType(String configType) {
-    for (ClusterConfigMappingEntity e : clusterDAO.getClusterConfigMappingEntitiesByCluster(
-        getClusterId())) {
-      if (e.isSelected() > 0 && e.getType().equals(configType)) {
-        return getConfig(e.getType(), e.getTag());
-      }
+    List<ClusterConfigMappingEntity> entities = clusterDAO.getLatestClusterConfigMappingsEntityByType(getClusterId(), configType);
+    if (!entities.isEmpty()) {
+      return getConfig(configType, entities.get(0).getTag());
     }
 
     return null;
@@ -3454,5 +3433,4 @@ public class ClusterImpl implements Cluster {
 
     m_clusterPropertyCache.clear();
   }
-
 }


[17/50] ambari git commit: AMBARI-18946 Ambari Integration for Zeppelin and Spark 2.0 (r-kamath)

Posted by sw...@apache.org.
AMBARI-18946 Ambari Integration for Zeppelin and Spark 2.0 (r-kamath)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/182479d3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/182479d3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/182479d3

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 182479d3ce0324422379ca87ec369b7025dfe91f
Parents: d3c7555
Author: Renjith Kamath <re...@gmail.com>
Authored: Tue Nov 29 11:57:41 2016 +0530
Committer: Renjith Kamath <re...@gmail.com>
Committed: Tue Nov 29 11:57:41 2016 +0530

----------------------------------------------------------------------
 .../0.6.0.2.5/package/scripts/master.py         | 15 +++++++----
 .../0.6.0.2.5/package/scripts/params.py         | 28 +++++++++++++++-----
 2 files changed, 31 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/182479d3/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
index 33a2a14..e82b53c 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
@@ -58,8 +58,12 @@ class Master(Script):
     # update the configs specified by user
     self.configure(env)
 
-    Execute('echo spark_version:' + params.spark_version + ' detected for spark_home: '
-            + params.spark_home + ' >> ' + params.zeppelin_log_file, user=params.zeppelin_user)
+    if params.spark_version:
+      Execute('echo spark_version:' + str(params.spark_version) + ' detected for spark_home: '
+              + params.spark_home + ' >> ' + params.zeppelin_log_file, user=params.zeppelin_user)
+    if params.spark2_version:
+      Execute('echo spark2_version:' + str(params.spark2_version) + ' detected for spark2_home: '
+              + params.spark2_home + ' >> ' + params.zeppelin_log_file, user=params.zeppelin_user)
 
   def create_zeppelin_dir(self, params):
     params.HdfsResource(format("/user/{zeppelin_user}"),
@@ -138,9 +142,10 @@ class Master(Script):
     File(format("{params.conf_dir}/log4j.properties"), content=params.log4j_properties_content,
          owner=params.zeppelin_user, group=params.zeppelin_group)
 
-    # copy hive-site.xml
-    File(format("{params.conf_dir}/hive-site.xml"), content=StaticFile("/etc/spark/conf/hive-site.xml"),
-         owner=params.zeppelin_user, group=params.zeppelin_group)
+    # copy hive-site.xml only if Spark 1.x is installed
+    if 'spark-defaults' in params.config['configurations']:
+        File(format("{params.conf_dir}/hive-site.xml"), content=StaticFile("/etc/spark/conf/hive-site.xml"),
+             owner=params.zeppelin_user, group=params.zeppelin_group)
 
     if len(params.hbase_master_hosts) > 0:
       # copy hbase-site.xml

http://git-wip-us.apache.org/repos/asf/ambari/blob/182479d3/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
index 27ed150..5a3174f 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/params.py
@@ -39,6 +39,14 @@ def get_port_from_url(address):
   else:
     return address
 
+def extract_spark_version(spark_home):
+  try:
+    with open(spark_home + "/RELEASE") as fline:
+      return re.search('Spark (\d\.\d).+', fline.readline().rstrip()).group(1)
+  except:
+    pass
+  return None
+
 
 # server configurations
 config = Script.get_config()
@@ -58,13 +66,17 @@ spark_jar_dir = config['configurations']['zeppelin-env']['zeppelin.spark.jar.dir
 spark_jar = format("{spark_jar_dir}/zeppelin-spark-0.5.5-SNAPSHOT.jar")
 setup_view = True
 temp_file = config['configurations']['zeppelin-env']['zeppelin.temp.file']
-spark_home = os.path.join(stack_root, "current", "spark-client")
 
-try:
-  fline = open(spark_home + "/RELEASE").readline().rstrip()
-  spark_version = re.search('Spark (\d\.\d).+', fline).group(1)
-except:
-  pass
+spark_home = None
+spark_version = None
+spark2_home = None
+spark2_version = None
+if 'spark-defaults' in config['configurations']:
+  spark_home = os.path.join(stack_root, "current", 'spark-client')
+  spark_version = extract_spark_version(spark_home)
+if 'spark2-defaults' in config['configurations']:
+  spark2_home = os.path.join(stack_root, "current", 'spark2-client')
+  spark2_version = extract_spark_version(spark2_home)
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
@@ -126,8 +138,10 @@ if 'hbase_master_hosts' in master_configs and 'hbase-site' in config['configurat
   hbase_zookeeper_quorum = config['configurations']['hbase-site']['hbase.zookeeper.quorum']
 
 # detect spark queue
-if 'spark.yarn.queue' in config['configurations']['spark-defaults']:
+if 'spark-defaults' in config['configurations'] and 'spark.yarn.queue' in config['configurations']['spark-defaults']:
   spark_queue = config['configurations']['spark-defaults']['spark.yarn.queue']
+elif 'spark2-defaults' in config['configurations'] and 'spark.yarn.queue' in config['configurations']['spark2-defaults']:
+  spark_queue = config['configurations']['spark2-defaults']['spark.yarn.queue']
 else:
   spark_queue = 'default'
 


[42/50] ambari git commit: Revert "AMBARI-19020. Ubuntu14/16 Add Support for Zookeeper on HDP 2.5 (Duc Le via ncole)"

Posted by sw...@apache.org.
Revert "AMBARI-19020. Ubuntu14/16 Add Support for Zookeeper on HDP 2.5 (Duc Le via ncole)"

This reverts commit 1632f9b5c6f7822c1fe59c4532250b47ae902513.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7632917d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7632917d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7632917d

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 7632917d5c7dc24e2025358e176a53ff1b6fe430
Parents: 1632f9b
Author: Nate Cole <nc...@hortonworks.com>
Authored: Wed Nov 30 14:53:18 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Wed Nov 30 14:53:18 2016 -0500

----------------------------------------------------------------------
 .../stacks/HDP/2.5/services/ZOOKEEPER/metainfo.xml     | 13 -------------
 1 file changed, 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7632917d/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZOOKEEPER/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZOOKEEPER/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZOOKEEPER/metainfo.xml
index 9b9477e..de8ebd9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZOOKEEPER/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZOOKEEPER/metainfo.xml
@@ -21,19 +21,6 @@
     <service>
       <name>ZOOKEEPER</name>
       <version>3.4.6.2.5</version>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>ubuntu14,ubuntu16</osFamily>
-          <packages>
-            <package>
-              <name>zookeeper-${stack_version}</name>
-            </package>
-            <package>
-              <name>zookeeper-${stack_version}-server</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
     </service>
   </services>
 </metainfo>


[45/50] ambari git commit: AMBARI-19023. After adding an Atlas server the first restart command fails since stop doesn't have any configs; if no pid dir exists during stop, perform no-op (alejandro)

Posted by sw...@apache.org.
AMBARI-19023. After adding an Atlas server the first restart command fails since stop doesn't have any configs; if no pid dir exists during stop, perform no-op (alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c578a370
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c578a370
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c578a370

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: c578a370722907361393cde668a6b9dedb678829
Parents: 47d2f70
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Wed Nov 30 13:51:02 2016 -0800
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Wed Nov 30 13:51:02 2016 -0800

----------------------------------------------------------------------
 .../0.1.0.2.3/package/scripts/metadata_server.py  | 18 ++++++++++++++++++
 .../stacks/2.3/ATLAS/test_metadata_server.py      |  5 ++++-
 2 files changed, 22 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c578a370/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
index 712c15c..833f5d3 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
@@ -16,6 +16,10 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
+# Python Imports
+import os
+
+# Local Imports
 from metadata import metadata
 from resource_management import Fail
 from resource_management.libraries.functions import conf_select
@@ -122,6 +126,20 @@ class MetadataServer(Script):
     env.set_params(params)
     daemon_cmd = format('source {params.conf_dir}/atlas-env.sh; {params.metadata_stop_script}')
 
+    # If the pid dir doesn't exist, this means either
+    # 1. The user just added Atlas service and issued a restart command (stop+start). So stop should be a no-op
+    # since there's nothing to stop.
+    # OR
+    # 2. The user changed the value of the pid dir config and incorrectly issued a restart command.
+    # In which case the stop command cannot do anything since Ambari doesn't know which process to kill.
+    # The start command will spawn another instance.
+    # The user should have issued a stop, changed the config, and then started it.
+    if not os.path.isdir(params.pid_dir):
+      Logger.info("*******************************************************************")
+      Logger.info("Will skip the stop command since this is the first time stopping/restarting Atlas "
+                  "and the pid dir does not exist, %s\n" % params.pid_dir)
+      return
+
     try:
       Execute(daemon_cmd,
               user=params.metadata_user,
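
A minimal standalone sketch of the guard that hunk introduces, assuming a plain
pid-dir path and a stop callback (the names stop_atlas/do_stop are illustrative,
not the actual Ambari entry points):

    import logging
    import os

    LOG = logging.getLogger(__name__)

    def stop_atlas(pid_dir, do_stop):
        # If the pid dir is missing there is nothing to kill: either Atlas was
        # just added (first restart), or the pid dir config changed without a
        # prior stop. Either way the stop step becomes a no-op instead of failing.
        if not os.path.isdir(pid_dir):
            LOG.info("Skipping stop; pid dir %s does not exist", pid_dir)
            return
        do_stop()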

http://git-wip-us.apache.org/repos/asf/ambari/blob/c578a370/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py b/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py
index 585dc94..f2fec70 100644
--- a/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py
+++ b/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py
@@ -366,7 +366,10 @@ class TestMetadataServer(RMFTestCase):
                               user = 'atlas',
     )
 
-  def test_stop_default(self):
+  @patch('os.path.isdir')
+  def test_stop_default(self, is_dir_mock):
+    is_dir_mock.return_value = True
+
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/metadata_server.py",
                        classname = "MetadataServer",
                        command = "stop",


[08/50] ambari git commit: temp scv

Posted by sw...@apache.org.
temp scv


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/59d11cf3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/59d11cf3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/59d11cf3

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 59d11cf36b9fa283ba9592c8440f6be53debadd0
Parents: e62bf6e
Author: Myroslav Papirkovskyi <mp...@hortonworks.com>
Authored: Mon Nov 21 19:56:33 2016 +0200
Committer: Myroslav Papirkovskyi <mp...@hortonworks.com>
Committed: Mon Nov 28 18:39:06 2016 +0200

----------------------------------------------------------------------
 .../internal/ClusterControllerImpl.java         |  2 +-
 .../ServiceConfigVersionResourceProvider.java   |  3 ++-
 .../logging/LoggingSearchPropertyProvider.java  | 11 +++++++--
 .../ambari/server/orm/dao/ClusterDAO.java       | 24 ++++++++++++++++++++
 .../ambari/server/orm/dao/ServiceConfigDAO.java | 13 +++++------
 .../orm/entities/ClusterConfigEntity.java       |  7 +++++-
 .../entities/ClusterConfigMappingEntity.java    |  6 +++++
 .../orm/entities/ServiceConfigEntity.java       |  5 ++--
 8 files changed, 57 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/59d11cf3/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
index 32bed7b..c752e80 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
@@ -858,7 +858,7 @@ public class ClusterControllerImpl implements ClusterController {
       if (compVal == 0) {
         Schema schema = getSchema(resourceType);
 
-        for (Type type : Type.values()) {
+        for (Type type : schema.getKeyTypes()) {
           String keyPropertyId = schema.getKeyPropertyId(type);
           if (keyPropertyId != null) {
             compVal = compareValues(resource1.getPropertyValue(keyPropertyId),

http://git-wip-us.apache.org/repos/asf/ambari/blob/59d11cf3/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java
index 2edbe9b..e5ca389 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java
@@ -91,8 +91,9 @@ public class ServiceConfigVersionResourceProvider extends
     PROPERTY_IDS.add(SERVICE_CONFIG_VERSION_IS_COMPATIBLE_PROPERTY_ID);
 
     // keys
-    KEY_PROPERTY_IDS.put(Resource.Type.ServiceConfigVersion,SERVICE_CONFIG_VERSION_SERVICE_NAME_PROPERTY_ID);
+    KEY_PROPERTY_IDS.put(Resource.Type.Service,SERVICE_CONFIG_VERSION_SERVICE_NAME_PROPERTY_ID);
     KEY_PROPERTY_IDS.put(Resource.Type.Cluster,SERVICE_CONFIG_VERSION_CLUSTER_NAME_PROPERTY_ID);
+    KEY_PROPERTY_IDS.put(Resource.Type.ServiceConfigVersion,SERVICE_CONFIG_VERSION_PROPERTY_ID);
   }
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/59d11cf3/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingSearchPropertyProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingSearchPropertyProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingSearchPropertyProvider.java
index d9db290..16788ed 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingSearchPropertyProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingSearchPropertyProvider.java
@@ -39,8 +39,10 @@ import org.apache.log4j.Logger;
 
 import java.util.Collections;
 import java.util.EnumSet;
+import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -73,7 +75,7 @@ public class LoggingSearchPropertyProvider implements PropertyProvider {
 
   @Override
   public Set<Resource> populateResources(Set<Resource> resources, Request request, Predicate predicate) throws SystemException {
-
+    Map<String, Boolean> isLogSearchRunning = new HashMap<>();
     for (Resource resource : resources) {
       // obtain the required identifying properties on the host component resource
       final String componentName = (String)resource.getPropertyValue(PropertyHelper.getPropertyId("HostRoles", "component_name"));
@@ -90,7 +92,12 @@ public class LoggingSearchPropertyProvider implements PropertyProvider {
         continue;
       }
 
-      if (!logSearchServerRunning(clusterName)) {
+      Boolean isLogSearchRunningForSpecifiedCluster = isLogSearchRunning.get(clusterName);
+      if (isLogSearchRunningForSpecifiedCluster == null) {
+        isLogSearchRunningForSpecifiedCluster = logSearchServerRunning(clusterName);
+        isLogSearchRunning.put(clusterName, isLogSearchRunningForSpecifiedCluster);
+      }
+      if (!isLogSearchRunningForSpecifiedCluster) {
         continue;
       }
 

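A rough Python rendering of the per-cluster memoization added to
populateResources above (the Java change caches the logSearchServerRunning()
answer in a map so the check runs once per cluster per request; the function
and key names below are illustrative):

    def populate_resources(resources, log_search_server_running):
        # Cache "is Log Search running?" per cluster so the remote check is
        # performed at most once per cluster, not once per host-component.
        running_by_cluster = {}
        for resource in resources:
            cluster = resource["cluster_name"]
            if cluster not in running_by_cluster:
                running_by_cluster[cluster] = log_search_server_running(cluster)
            if not running_by_cluster[cluster]:
                continue
            # ... attach logging metadata to this resource ...
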
http://git-wip-us.apache.org/repos/asf/ambari/blob/59d11cf3/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java
index e93ac0e..b727c72 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java
@@ -116,6 +116,18 @@ public class ClusterDAO {
   }
 
   @RequiresSession
+  public List<ClusterConfigEntity> getLatestClusterConfigsByTypes(Long clusterId, List<String> types) {
+    TypedQuery<ClusterConfigEntity> query = entityManagerProvider.get().createNamedQuery(
+      "ClusterConfigEntity.findLatestClusterConfigsByTypes",
+      ClusterConfigEntity.class);
+
+    query.setParameter("clusterId", clusterId);
+    query.setParameter("types", types);
+
+    return daoUtils.selectList(query);
+  }
+
+  @RequiresSession
   public ClusterConfigEntity findConfig(Long clusterId, String type, Long version) {
     CriteriaBuilder cb = entityManagerProvider.get().getCriteriaBuilder();
     CriteriaQuery<ClusterConfigEntity> cq = cb.createQuery(ClusterConfigEntity.class);
@@ -240,6 +252,18 @@ public class ClusterDAO {
     return daoUtils.selectList(query);
   }
 
+  @RequiresSession
+  public List<ClusterConfigMappingEntity> getLatestClusterConfigMappingsEntityByType(long clusterId, String configType) {
+    TypedQuery<ClusterConfigMappingEntity> query = entityManagerProvider.get().createNamedQuery(
+      "ClusterConfigMappingEntity.findLatestClusterConfigMappingsByType",
+      ClusterConfigMappingEntity.class);
+
+    query.setParameter("clusterId", clusterId);
+    query.setParameter("typeName", configType);
+
+    return daoUtils.selectList(query);
+  }
+
   /**
    * Gets selected mappings for provided config types
    * @param clusterId cluster id

http://git-wip-us.apache.org/repos/asf/ambari/blob/59d11cf3/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
index 2cbee8d..212a9f0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
@@ -115,14 +115,13 @@ public class ServiceConfigDAO {
 
   @RequiresSession
   public List<ServiceConfigEntity> getLastServiceConfigs(Long clusterId) {
-    TypedQuery<ServiceConfigEntity> query = entityManagerProvider.get().
-      createQuery("SELECT scv FROM ServiceConfigEntity scv " +
-        "WHERE scv.clusterId = ?1 AND scv.createTimestamp = (" +
-        "SELECT MAX(scv2.createTimestamp) FROM ServiceConfigEntity scv2 " +
-        "WHERE scv2.serviceName = scv.serviceName AND scv2.clusterId = ?1 AND scv2.groupId IS NULL)",
-        ServiceConfigEntity.class);
+    TypedQuery<ServiceConfigEntity> query = entityManagerProvider.get().createNamedQuery(
+      "ServiceConfigEntity.findLatestServiceConfigsByCluster",
+      ServiceConfigEntity.class);
 
-    return daoUtils.selectList(query, clusterId);
+    query.setParameter("clusterId", clusterId);
+
+    return daoUtils.selectList(query);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/59d11cf3/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigEntity.java
index bf0faee..937e872 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigEntity.java
@@ -55,7 +55,12 @@ import javax.persistence.UniqueConstraint;
     @NamedQuery(name = "ClusterConfigEntity.findClusterConfigMappingsByStack",
       query = "SELECT mapping FROM ClusterConfigMappingEntity mapping " +
         "JOIN ClusterConfigEntity config ON mapping.typeName = config.type AND mapping.tag = config.tag " +
-        "WHERE mapping.clusterId = :clusterId AND config.stack = :stack")
+        "WHERE mapping.clusterId = :clusterId AND config.stack = :stack"),
+    @NamedQuery(name = "ClusterConfigEntity.findLatestClusterConfigsByTypes",
+      query = "SELECT cc FROM ClusterConfigEntity cc " +
+        "JOIN ClusterConfigMappingEntity ccm " +
+        "ON cc.clusterId = ccm.clusterId AND cc.type = ccm.typeName AND cc.tag = ccm.tag " +
+        "WHERE cc.clusterId = :clusterId AND ccm.selectedInd > 0 AND ccm.typeName IN :types")
 })
 
 public class ClusterConfigEntity {

http://git-wip-us.apache.org/repos/asf/ambari/blob/59d11cf3/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigMappingEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigMappingEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigMappingEntity.java
index c3c3e9e..04c6030 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigMappingEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigMappingEntity.java
@@ -23,6 +23,8 @@ import javax.persistence.Id;
 import javax.persistence.IdClass;
 import javax.persistence.JoinColumn;
 import javax.persistence.ManyToOne;
+import javax.persistence.NamedQueries;
+import javax.persistence.NamedQuery;
 import javax.persistence.Table;
 
 /**
@@ -31,6 +33,10 @@ import javax.persistence.Table;
 @Table(name = "clusterconfigmapping")
 @Entity
 @IdClass(ClusterConfigMappingEntityPK.class)
+@NamedQueries({
+  @NamedQuery(name = "ClusterConfigMappingEntity.findLatestClusterConfigMappingsByType",
+    query = "SELECT mapping FROM ClusterConfigMappingEntity mapping WHERE mapping.clusterId = :clusterId AND mapping.selectedInd > 0 AND mapping.typeName = :typeName")})
+
 public class ClusterConfigMappingEntity {
 
   @Id

http://git-wip-us.apache.org/repos/asf/ambari/blob/59d11cf3/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
index 7c28835..0df6f68 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
@@ -51,7 +51,8 @@ import javax.persistence.TableGenerator;
     @NamedQuery(name = "ServiceConfigEntity.findNextServiceConfigVersion", query = "SELECT COALESCE(MAX(serviceConfig.version), 0) + 1 AS nextVersion FROM ServiceConfigEntity serviceConfig WHERE serviceConfig.serviceName=:serviceName AND serviceConfig.clusterId=:clusterId"),
     @NamedQuery(name = "ServiceConfigEntity.findAllServiceConfigsByStack", query = "SELECT serviceConfig FROM ServiceConfigEntity serviceConfig WHERE serviceConfig.clusterId=:clusterId AND serviceConfig.stack=:stack"),
     @NamedQuery(name = "ServiceConfigEntity.findLatestServiceConfigsByStack", query = "SELECT serviceConfig FROM ServiceConfigEntity serviceConfig WHERE serviceConfig.clusterId = :clusterId AND serviceConfig.version = (SELECT MAX(serviceConfig2.version) FROM ServiceConfigEntity serviceConfig2 WHERE serviceConfig2.clusterId=:clusterId AND serviceConfig2.stack=:stack AND serviceConfig2.serviceName = serviceConfig.serviceName)"),
-    @NamedQuery(name = "ServiceConfigEntity.findLatestServiceConfigsByService", query = "SELECT scv FROM ServiceConfigEntity scv WHERE scv.clusterId = :clusterId AND scv.serviceName = :serviceName AND scv.version = (SELECT MAX(scv2.version) FROM ServiceConfigEntity scv2 WHERE (scv2.serviceName = :serviceName AND scv2.clusterId = :clusterId) AND (scv2.groupId = scv.groupId OR (scv2.groupId IS NULL AND scv.groupId IS NULL)))")})
+    @NamedQuery(name = "ServiceConfigEntity.findLatestServiceConfigsByService", query = "SELECT scv FROM ServiceConfigEntity scv WHERE scv.clusterId = :clusterId AND scv.serviceName = :serviceName AND scv.version = (SELECT MAX(scv2.version) FROM ServiceConfigEntity scv2 WHERE (scv2.serviceName = :serviceName AND scv2.clusterId = :clusterId) AND (scv2.groupId = scv.groupId OR (scv2.groupId IS NULL AND scv.groupId IS NULL)))"),
+    @NamedQuery(name = "ServiceConfigEntity.findLatestServiceConfigsByCluster", query = "SELECT scv FROM ServiceConfigEntity scv WHERE scv.clusterId = :clusterId AND scv.serviceConfigId IN (SELECT MAX(scv1.serviceConfigId) FROM ServiceConfigEntity scv1 WHERE (scv1.clusterId = :clusterId) AND (scv1.groupId IS NULL) GROUP BY scv1.serviceName)")})
 public class ServiceConfigEntity {
   @Id
   @Column(name = "service_config_id")
@@ -263,4 +264,4 @@ public class ServiceConfigEntity {
     }
     return true;
   }
-}
\ No newline at end of file
+}
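
The new findLatestServiceConfigsByCluster query keeps, per service, the
non-group row with the highest serviceConfigId. The same selection expressed
as a small Python sketch over in-memory rows (field names mirror the entity,
the data shape is illustrative):

    def latest_service_configs(rows, cluster_id):
        # rows: dicts with serviceConfigId, clusterId, serviceName, groupId
        latest = {}
        for row in rows:
            if row["clusterId"] != cluster_id or row["groupId"] is not None:
                continue
            best = latest.get(row["serviceName"])
            if best is None or row["serviceConfigId"] > best["serviceConfigId"]:
                latest[row["serviceName"]] = row
        return list(latest.values())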


[49/50] ambari git commit: AMBARI-19042: Ambari-server: OOZIE stack definition changes required to support credential store

Posted by sw...@apache.org.
AMBARI-19042: Ambari-server: OOZIE stack definition changes required to support credential store


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7f60b1f1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7f60b1f1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7f60b1f1

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 7f60b1f176d9add5ceacfc631d00245623a6edd5
Parents: 7b53d07
Author: Nahappan Somasundaram <ns...@hortonworks.com>
Authored: Wed Nov 30 17:50:21 2016 -0800
Committer: Nahappan Somasundaram <ns...@hortonworks.com>
Committed: Wed Nov 30 22:07:44 2016 -0800

----------------------------------------------------------------------
 .../main/resources/stacks/HDP/2.5/services/OOZIE/metainfo.xml    | 4 ++++
 1 file changed, 4 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7f60b1f1/ambari-server/src/main/resources/stacks/HDP/2.5/services/OOZIE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/OOZIE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/OOZIE/metainfo.xml
index f0a7754..75aa9d9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/OOZIE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/OOZIE/metainfo.xml
@@ -20,6 +20,10 @@
   <services>
     <service>
       <name>OOZIE</name>
+      <credential-store>
+        <supported>true</supported>
+        <enabled>false</enabled>
+      </credential-store>
       <extends>common-services/OOZIE/4.2.0.2.5</extends>
     </service>
   </services>


[50/50] ambari git commit: Merge from branch-2.5

Posted by sw...@apache.org.
Merge from branch-2.5


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/333c7015
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/333c7015
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/333c7015

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 333c70150963e9bee4388d03356443d9e35b5f7f
Parents: b749054 7f60b1f
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Thu Dec 1 00:13:14 2016 -0800
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Thu Dec 1 00:13:14 2016 -0800

----------------------------------------------------------------------
 .../views/stackVersions/stackVersionPage.html   |   2 +-
 ambari-agent/conf/unix/agent-multiplier.py      |  14 +-
 .../src/main/python/ambari_agent/ActionQueue.py |  10 +-
 .../ambari_agent/CustomServiceOrchestrator.py   |  21 +-
 .../TestCustomServiceOrchestrator.py            |  25 +-
 .../libraries/script/script.py                  |  15 +-
 .../logfeeder/metrics/LogFeederAMSClient.java   |   3 +-
 .../logfeeder/metrics/MetricsManager.java       |   2 +-
 .../org/apache/ambari/logsearch/LogSearch.java  |  15 +-
 .../logsearch/common/ExternalServerClient.java  |  12 +-
 .../logsearch/common/LogSearchConstants.java    |   1 +
 .../AbstractOperationHolderConverter.java       |   9 +
 ...actServiceLogRequestFacetQueryConverter.java |  44 ++
 .../BaseServiceLogRequestQueryConverter.java    |   1 +
 ...ServiceLogAnyGraphRequestQueryConverter.java |  13 +
 ...eLogComponentLevelRequestQueryConverter.java |  15 +-
 ...eLogComponentRequestFacetQueryConverter.java |  15 +-
 ...rviceLogLevelCountRequestQueryConverter.java |  16 +
 ...eLogLevelDateRangeRequestQueryConverter.java |  16 +-
 ...erviceLogTreeRequestFacetQueryConverter.java |  17 +-
 .../ambari/logsearch/doc/DocConstants.java      |   1 +
 .../logsearch/graph/GraphDataGenerator.java     |  26 +-
 .../logsearch/manager/ServiceLogsManager.java   |   2 +-
 .../request/ServiceLogParamDefinition.java      |   6 +
 .../request/impl/BaseServiceLogRequest.java     |  13 +
 .../LogsearchKRBAuthenticationFilter.java       |   4 +-
 .../src/main/resources/logsearch.properties     |   2 +-
 .../dashboard/BubbleGraphTableLayoutView.js     |   4 +-
 .../views/dashboard/ComponentListView.js        |   2 +-
 .../scripts/views/dashboard/HostListView.js     |  14 +-
 .../main/webapp/scripts/views/tabs/TreeView.js  |  10 +-
 .../webapp/templates/tabs/TreeView_tmpl.html    | 115 ++--
 ...ComponentLevelRequestQueryConverterTest.java |   7 +-
 ...ComponentRequestFacetQueryConverterTest.java |   4 +-
 .../ambari-metrics/datasource.js                |   6 +-
 .../conf/unix/ambari-metrics-grafana            |  34 +-
 ambari-server/docs/configuration/index.md       |  25 +-
 ambari-server/pom.xml                           |   2 +
 ambari-server/sbin/ambari-server                |   6 +-
 .../server/actionmanager/ActionScheduler.java   |   2 +-
 .../ambari/server/agent/HeartBeatHandler.java   |   2 +-
 .../ambari/server/agent/HeartbeatProcessor.java |   2 +-
 .../ambari/server/checks/CheckDescription.java  |  13 +
 .../checks/DatabaseConsistencyCheckHelper.java  |  10 +-
 .../server/checks/ServicePresenceCheck.java     | 172 ++++++
 .../server/configuration/Configuration.java     |  30 +-
 .../internal/ClusterControllerImpl.java         |   2 +-
 .../ServiceConfigVersionResourceProvider.java   |   3 +-
 .../logging/LoggingSearchPropertyProvider.java  |  11 +-
 .../ambari/server/events/AmbariEvent.java       |   5 +
 .../events/ClusterConfigFinishedEvent.java      |  53 ++
 .../ambari/server/orm/dao/ClusterDAO.java       |  24 +
 .../ambari/server/orm/dao/ServiceConfigDAO.java |  13 +-
 .../orm/entities/ClusterConfigEntity.java       |   7 +-
 .../entities/ClusterConfigMappingEntity.java    |   6 +
 .../orm/entities/ServiceConfigEntity.java       |   5 +-
 .../server/orm/helpers/dbms/MySqlHelper.java    |  18 +
 .../AmbariLdapAuthenticationProvider.java       |  23 +-
 .../AmbariLdapBindAuthenticator.java            | 233 ++++++-
 .../kerberos/MITKerberosOperationHandler.java   |  22 +-
 .../upgrades/RangerKmsProxyConfig.java          |  95 +++
 .../server/state/cluster/ClusterImpl.java       |  42 +-
 .../ambari/server/state/quicklinks/Link.java    |  21 +
 .../services/RetryUpgradeActionService.java     |   2 +-
 .../ambari/server/topology/TopologyManager.java |  16 +-
 .../server/upgrade/SchemaUpgradeHelper.java     |  35 +-
 .../server/upgrade/UpgradeCatalog250.java       |  66 +-
 .../ambari/server/utils/ShellCommandUtil.java   |  11 +-
 .../ambari/server/view/ViewExtractor.java       |   2 +-
 .../apache/ambari/server/view/ViewRegistry.java | 381 +++++++-----
 ambari-server/src/main/python/ambari-server.py  | 149 ++++-
 .../python/ambari_server/serverConfiguration.py |   9 +-
 .../main/python/ambari_server/serverUtils.py    |  11 +
 .../main/python/ambari_server/setupActions.py   |   1 +
 .../main/python/ambari_server/setupMpacks.py    | 167 ++++-
 .../src/main/python/ambari_server/utils.py      |  24 +-
 .../src/main/python/ambari_server_main.py       |  44 +-
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   1 +
 .../0.1.0/configuration/storm-site.xml          |  84 ---
 .../AMBARI_METRICS/0.1.0/metainfo.xml           |   4 -
 .../package/scripts/metadata_server.py          |  18 +
 .../FLUME/1.4.0.2.0/metrics.json                |  62 --
 .../package/files/hbaseSmokeVerify.sh           |   2 +-
 .../HBASE/0.96.0.2.0/package/scripts/upgrade.py |   3 +-
 .../package/scripts/hive_server_interactive.py  |   5 +-
 .../0.12.0.2.0/package/scripts/params_linux.py  |   1 +
 .../configuration/logfeeder-ambari-config.xml   |  37 ++
 .../configuration/logfeeder-output-config.xml   |  37 ++
 .../configuration/logsearch-properties.xml      |  40 ++
 .../LOGSEARCH/0.5.0/metainfo.xml                |   5 +
 .../LOGSEARCH/0.5.0/package/scripts/params.py   |  17 +-
 .../0.5.0/package/scripts/setup_logfeeder.py    |  10 +
 .../templates/input.config-ambari.json.j2       | 602 -------------------
 .../package/templates/output.config.json.j2     |  61 --
 .../properties/input.config-ambari.json.j2      | 602 +++++++++++++++++++
 .../0.5.0/properties/output.config.json.j2      |  61 ++
 .../RANGER/0.4.0/package/scripts/params.py      |  14 +
 .../0.4.0/package/scripts/setup_ranger_xml.py   |  21 +
 .../common-services/RANGER/0.6.0/metainfo.xml   |   2 +
 .../STORM/1.0.1/configuration/storm-site.xml    |  54 ++
 .../0.6.0.2.5/package/scripts/master.py         |  15 +-
 .../0.6.0.2.5/package/scripts/params.py         |  28 +-
 .../custom_actions/scripts/install_packages.py  |   2 +-
 .../src/main/resources/scripts/stack_advisor.py |  11 +-
 .../HDP/2.0.6/configuration/cluster-env.xml     |  10 +-
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |  50 +-
 .../stacks/HDP/2.1/services/stack_advisor.py    |  20 +-
 .../stacks/HDP/2.2/services/stack_advisor.py    |   7 -
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml |  12 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml |  12 +-
 .../stacks/HDP/2.3/upgrades/upgrade-2.5.xml     |  16 +-
 .../stacks/HDP/2.3/upgrades/upgrade-2.6.xml     |  16 +-
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml |  12 +-
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml |  12 +-
 .../stacks/HDP/2.4/upgrades/upgrade-2.5.xml     |  16 +-
 .../stacks/HDP/2.4/upgrades/upgrade-2.6.xml     |  16 +-
 .../stacks/HDP/2.5/role_command_order.json      |   2 +-
 .../HIVE/configuration/hive-interactive-env.xml |  10 +
 .../stacks/HDP/2.5/services/KNOX/metainfo.xml   |   4 -
 .../stacks/HDP/2.5/services/OOZIE/metainfo.xml  |   4 +
 .../stacks/HDP/2.5/services/stack_advisor.py    |  52 +-
 .../HDP/2.5/upgrades/host-upgrade-2.5.xml       |  11 +-
 .../services/ATLAS/themes/theme_version_2.json  |  28 +-
 .../configuration/hive-interactive-site.xml     |  58 ++
 .../HIVE/configuration/tez-interactive-site.xml |  78 +++
 .../stacks/HDP/2.6/services/stack_advisor.py    |   2 +-
 .../PERF/1.0/configuration/cluster-env.xml      |   6 +-
 .../GRUMPY/configuration/grumpy-site.xml        |   2 +-
 .../services/HAPPY/configuration/happy-site.xml |   2 +-
 .../services/HBASE/configuration/hbase-env.xml  |  30 +-
 .../HBASE/configuration/hbase-log4j.xml         |   2 +-
 .../configuration/hbase-logsearch-conf.xml      |   6 +-
 .../HBASE/configuration/hbase-policy.xml        |   6 +-
 .../services/HBASE/configuration/hbase-site.xml |  74 +--
 .../services/HDFS/configuration/core-site.xml   |  32 +-
 .../services/HDFS/configuration/hadoop-env.xml  |  44 +-
 .../hadoop-metrics2.properties.xml              |   2 +-
 .../HDFS/configuration/hadoop-policy.xml        |  22 +-
 .../services/HDFS/configuration/hdfs-log4j.xml  |   2 +-
 .../HDFS/configuration/hdfs-logsearch-conf.xml  |   6 +-
 .../services/HDFS/configuration/hdfs-site.xml   | 108 ++--
 .../services/HDFS/configuration/ssl-client.xml  |  14 +-
 .../services/HDFS/configuration/ssl-server.xml  |  16 +-
 .../SLEEPY/configuration/sleepy-site.xml        |   2 +-
 .../services/SNOW/configuration/snow-site.xml   |   2 +-
 .../YARN/configuration-mapred/mapred-env.xml    |   2 +-
 .../YARN/configuration-mapred/mapred-site.xml   |  28 +-
 .../services/YARN/configuration/yarn-env.xml    |   8 +-
 .../services/YARN/configuration/yarn-log4j.xml  |   2 +-
 .../services/YARN/configuration/yarn-site.xml   | 150 ++---
 .../src/main/resources/stacks/stack_advisor.py  | 200 +-----
 .../server/checks/ServicePresenceCheckTest.java | 217 +++++++
 .../StackDefinedPropertyProviderTest.java       |   2 +
 .../RestMetricsPropertyProviderTest.java        |   2 +
 .../AmbariLdapBindAuthenticatorTest.java        | 226 ++++---
 .../MITKerberosOperationHandlerTest.java        |  23 +
 .../upgrades/RangerKmsProxyConfigTest.java      | 141 +++++
 .../QuickLinksConfigurationModuleTest.java      |  36 ++
 .../ClusterDeployWithStartOnlyTest.java         |   2 +-
 ...InstallWithoutStartOnComponentLevelTest.java |   2 +-
 .../ClusterInstallWithoutStartTest.java         |   2 +-
 .../server/upgrade/UpgradeCatalog250Test.java   |   5 +
 .../server/utils/TestShellCommandUtil.java      |  13 +-
 .../ambari/server/view/ViewRegistryTest.java    |   2 +
 .../src/test/python/TestAmbariServer.py         |   4 +-
 ambari-server/src/test/python/TestMpacks.py     | 223 +++++--
 ambari-server/src/test/python/TestUtils.py      |  26 +-
 .../grafana-dashboards/grafana-hdfs-users.json  | 270 +++++++++
 .../dashboards/service-metrics/STORM.txt        |   7 +
 .../stacks/2.0.6/common/test_stack_advisor.py   |  34 +-
 .../stacks/2.1/common/test_stack_advisor.py     |   5 -
 .../stacks/2.2/common/test_stack_advisor.py     |  19 +-
 .../stacks/2.3/ATLAS/test_metadata_server.py    |   5 +-
 .../stacks/2.4/LOGSEARCH/test_logfeeder.py      |  14 +-
 .../test/python/stacks/2.4/configs/default.json |   6 +
 .../stacks/2.5/common/test_stack_advisor.py     |  79 ++-
 .../stacks/2.6/common/test_stack_advisor.py     |   4 +-
 .../test/python/stacks/test_stack_adviser.py    | 239 --------
 .../python/uninstall/common-services/SERVICEA   |   1 +
 .../python/uninstall/common-services/SERVICEB   |   1 +
 .../test/python/uninstall/dashboards/SERVICEA   |   1 +
 .../test/python/uninstall/dashboards/SERVICEB   |   1 +
 .../uninstall/dashboards/files/README.txt       |  17 +
 .../python/uninstall/dashboards/files/STORM.txt |   1 +
 .../uninstall/dashboards/files/metainfo.xml     |   1 +
 .../test/python/uninstall/extensions/SERVICEA   |   1 +
 .../test/python/uninstall/extensions/SERVICEB   |   1 +
 .../test/python/uninstall/stacks/2.0/SERVICEA   |   1 +
 .../test/python/uninstall/stacks/2.0/SERVICEB   |   1 +
 .../uninstall/stacks/2.0/files/README.txt       |  17 +
 .../uninstall/stacks/2.0/files/metainfo1.xml    |   1 +
 .../uninstall/stacks/2.0/files/metainfo2.xml    |   1 +
 .../child_quicklinks_with_properties.json       |  64 ++
 .../parent_quicklinks_with_properties.json      |  65 ++
 ambari-web/app/assets/test/tests.js             |   5 +
 ambari-web/app/controllers.js                   |   1 +
 .../main/admin/service_auto_start.js            |  58 +-
 .../admin/stack_upgrade_history_controller.js   | 217 +++++++
 .../alert_definitions_actions_controller.js     |   4 +-
 ambari-web/app/data/HDP2.3/site_properties.js   |  25 +-
 ambari-web/app/data/HDP2/site_properties.js     |  28 +
 ambari-web/app/mappers.js                       |   3 +-
 .../app/mappers/stack_upgrade_history_mapper.js |  54 ++
 ambari-web/app/messages.js                      |  40 +-
 ambari-web/app/mixins.js                        |   1 +
 ambari-web/app/models.js                        |   2 +
 .../app/models/finished_upgrade_entity.js       |  92 +++
 .../stack_version/stack_upgrade_history.js      |  37 ++
 ambari-web/app/routes/main.js                   |   7 +
 ambari-web/app/styles/stack_versions.less       |  69 +++
 .../highAvailability/journalNode/step2.hbs      |  14 +-
 .../templates/main/admin/service_auto_start.hbs |  12 +-
 .../admin/stack_upgrade/upgrade_history.hbs     | 105 ++++
 .../stack_upgrade/upgrade_history_details.hbs   |  46 ++
 .../app/utils/configs/config_initializer.js     |  28 +-
 .../mount_points_based_initializer_mixin.js     | 340 +++++++++++
 ambari-web/app/views.js                         |   2 +
 ambari-web/app/views/main/admin.js              |   3 +-
 .../app/views/main/admin/service_auto_start.js  |  25 +-
 .../service_auto_start/component_auto_start.js  |  10 +
 .../views/main/admin/stack_upgrade/menu_view.js |   6 +
 .../upgrade_history_details_view.js             |  85 +++
 .../admin/stack_upgrade/upgrade_history_view.js | 303 ++++++++++
 ambari-web/app/views/main/dashboard/widgets.js  |  17 +-
 .../stack_upgrade_history_controller_test.js    | 125 ++++
 .../stack_upgrade_history_mapper_test.js        | 372 ++++++++++++
 .../test/models/finished_upgrade_entity_test.js | 197 ++++++
 ambari-web/test/utils/ajax/ajax_test.js         |   9 +-
 .../utils/configs/config_initializer_test.js    | 458 ++++++++++++++
 .../upgrade_history_details_view_test.js        | 248 ++++++++
 .../stack_upgrade/upgrade_history_view_test.js  | 173 ++++++
 .../upgrade_version_box_view_test.js            |   2 +-
 .../test/views/main/dashboard/widgets_test.js   |  11 +-
 contrib/utils/perf/deploy-gce-perf-cluster.py   | 405 +++++++++----
 contrib/views/ambari-views-package/pom.xml      | 123 ++++
 .../src/main/package/deb/control/control        |  22 +
 contrib/views/capacity-scheduler/pom.xml        |  18 +
 contrib/views/files/pom.xml                     |  18 +
 contrib/views/hawq/pom.xml                      |  18 +
 contrib/views/hive-next/pom.xml                 |  18 +
 contrib/views/hive/pom.xml                      |  18 +
 contrib/views/hueambarimigration/pom.xml        |  18 +
 contrib/views/jobs/pom.xml                      |  23 +
 contrib/views/pig/pom.xml                       |  18 +
 contrib/views/pom.xml                           |   5 +
 contrib/views/slider/pom.xml                    |  18 +
 contrib/views/storm/pom.xml                     |  23 +
 contrib/views/tez/pom.xml                       |  18 +
 contrib/views/wfmanager/pom.xml                 |  18 +
 .../resources/ui/app/components/job-details.js  |   3 +
 contrib/views/zeppelin/pom.xml                  |  18 +
 251 files changed, 8434 insertions(+), 2681 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/333c7015/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
index 6de48c1,52de784..1f44e28
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
@@@ -17,8 -17,14 +17,16 @@@
   */
  package org.apache.ambari.server.upgrade;
  
 +import com.google.inject.Inject;
 +import com.google.inject.Injector;
+ import java.sql.SQLException;
+ import java.util.ArrayList;
+ import java.util.Collections;
+ import java.util.HashMap;
+ import java.util.List;
+ import java.util.Map;
+ import java.util.Set;
+ 
  import org.apache.ambari.server.AmbariException;
  import org.apache.ambari.server.actionmanager.CommandExecutionType;
  import org.apache.ambari.server.controller.AmbariManagementController;
@@@ -32,13 -38,9 +40,6 @@@ import org.apache.commons.lang.StringUt
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  
- import java.sql.SQLException;
- import java.util.ArrayList;
- import java.util.Collections;
- import java.util.HashMap;
- import java.util.List;
- import java.util.Map;
 -import com.google.inject.Inject;
 -import com.google.inject.Injector;
--
  /**
   * Upgrade catalog for version 2.5.0.
   */
@@@ -133,8 -135,7 +134,9 @@@ public class UpgradeCatalog250 extends 
      addNewConfigurationsFromXml();
      updateAMSConfigs();
      updateKafkaConfigs();
 +    updateHIVEInteractiveConfigs();
 +    updateTEZInteractiveConfigs();
+     updateHiveLlapConfigs();
    }
  
    protected void updateHostVersionTable() throws SQLException {

http://git-wip-us.apache.org/repos/asf/ambari/blob/333c7015/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
----------------------------------------------------------------------
diff --cc ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
index 4866432,7335637..2c5ebeb
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
@@@ -277,20 -274,10 +277,21 @@@ class HiveServerInteractiveDefault(Hive
  
        unique_name = "llap-slider%s" % datetime.utcnow().strftime('%Y-%m-%d_%H-%M-%S')
  
 +      # Figure out the Slider Anti-affinity to be used.
 +      # YARN does not support anti-affinity, and therefore Slider implements AA by the means of exclusion lists, i.e, it
 +      # starts containers one by one and excludes the nodes it gets (adding a delay of ~2sec./machine). When the LLAP
 +      # container memory size configuration is more than half of YARN node memory, AA is implicit and should be avoided.
 +      slider_placement = 4
 +      if long(params.llap_daemon_container_size) > (0.5 * long(params.yarn_nm_mem)):
 +        slider_placement = 0
 +        Logger.info("Setting slider_placement : 0, as llap_daemon_container_size : {0} > 0.5 * "
 +                    "YARN NodeManager Memory({1})".format(params.llap_daemon_container_size, params.yarn_nm_mem))
 +
        cmd = format("{stack_root}/current/hive-server2-hive2/bin/hive --service llap --instances {params.num_llap_nodes}"
-                    " --slider-am-container-mb {params.slider_am_container_mb} --size {params.llap_daemon_container_size}m "
+                    " --slider-am-container-mb {params.slider_am_container_mb} --size {params.llap_daemon_container_size}m"
                     " --cache {params.hive_llap_io_mem_size}m --xmx {params.llap_heap_size}m --loglevel {params.llap_log_level}"
-                    " --slider-placement {slider_placement} --output {LLAP_PACKAGE_CREATION_PATH}/{unique_name}")
++                   " --slider-placement {slider_placement} --output {LLAP_PACKAGE_CREATION_PATH}/{unique_name}"
+                    " {params.llap_extra_slider_opts} --output {LLAP_PACKAGE_CREATION_PATH}/{unique_name}")
        if params.security_enabled:
          llap_keytab_splits = params.hive_llap_keytab_file.split("/")
          Logger.debug("llap_keytab_splits : {0}".format(llap_keytab_splits))
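
The slider-placement decision merged above reduces to one comparison; a hedged
standalone sketch (int() stands in for the long() casts in the Python 2 script,
and the argument names are illustrative):

    def choose_slider_placement(llap_daemon_container_size_mb, yarn_nm_mem_mb):
        # YARN offers no anti-affinity, so Slider emulates it with exclusion
        # lists, starting containers one by one with a per-node delay. When a
        # single LLAP container needs more than half of a NodeManager's memory,
        # only one container fits per node anyway, so the anti-affinity policy
        # (4) is dropped in favour of the default placement (0).
        if int(llap_daemon_container_size_mb) > 0.5 * int(yarn_nm_mem_mb):
            return 0
        return 4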

http://git-wip-us.apache.org/repos/asf/ambari/blob/333c7015/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/333c7015/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-interactive-env.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/333c7015/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/333c7015/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
----------------------------------------------------------------------
diff --cc ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
index 5b520c3,ce0b387..dc8a7e5
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
@@@ -210,15 -211,13 +210,17 @@@ public class UpgradeCatalog250Test 
    public void testExecuteDMLUpdates() throws Exception {
      Method updateAmsConfigs = UpgradeCatalog250.class.getDeclaredMethod("updateAMSConfigs");
      Method updateKafkaConfigs = UpgradeCatalog250.class.getDeclaredMethod("updateKafkaConfigs");
+     Method updateHiveLlapConfigs = UpgradeCatalog250.class.getDeclaredMethod("updateHiveLlapConfigs");
      Method addNewConfigurationsFromXml = AbstractUpgradeCatalog.class.getDeclaredMethod("addNewConfigurationsFromXml");
 +    Method updateHIVEInteractiveConfigs = UpgradeCatalog250.class.getDeclaredMethod("updateHIVEInteractiveConfigs");
 +    Method updateTEZInteractiveConfigs = UpgradeCatalog250.class.getDeclaredMethod("updateTEZInteractiveConfigs");
  
      UpgradeCatalog250 upgradeCatalog250 = createMockBuilder(UpgradeCatalog250.class)
        .addMockedMethod(updateAmsConfigs)
        .addMockedMethod(updateKafkaConfigs)
 +      .addMockedMethod(updateHIVEInteractiveConfigs)
 +      .addMockedMethod(updateTEZInteractiveConfigs)
+       .addMockedMethod(updateHiveLlapConfigs)
        .addMockedMethod(addNewConfigurationsFromXml)
        .createMock();
  
@@@ -232,12 -231,9 +234,15 @@@
      upgradeCatalog250.updateKafkaConfigs();
      expectLastCall().once();
  
 +    upgradeCatalog250.updateHIVEInteractiveConfigs();
 +    expectLastCall().once();
 +
 +    upgradeCatalog250.updateTEZInteractiveConfigs();
 +    expectLastCall().once();
 +
+     upgradeCatalog250.updateHiveLlapConfigs();
+     expectLastCall().once();
+ 
      replay(upgradeCatalog250);
  
      upgradeCatalog250.executeDMLUpdates();


[41/50] ambari git commit: AMBARI-19020. Ubuntu14/16 Add Support for Zookeeper on HDP 2.5 (Duc Le via ncole)

Posted by sw...@apache.org.
AMBARI-19020. Ubuntu14/16 Add Support for Zookeeper on HDP 2.5 (Duc Le via ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1632f9b5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1632f9b5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1632f9b5

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 1632f9b5c6f7822c1fe59c4532250b47ae902513
Parents: ef41837
Author: Nate Cole <nc...@hortonworks.com>
Authored: Wed Nov 30 12:49:48 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Wed Nov 30 14:08:39 2016 -0500

----------------------------------------------------------------------
 .../stacks/HDP/2.5/services/ZOOKEEPER/metainfo.xml     | 13 +++++++++++++
 1 file changed, 13 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1632f9b5/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZOOKEEPER/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZOOKEEPER/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZOOKEEPER/metainfo.xml
index de8ebd9..9b9477e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZOOKEEPER/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZOOKEEPER/metainfo.xml
@@ -21,6 +21,19 @@
     <service>
       <name>ZOOKEEPER</name>
       <version>3.4.6.2.5</version>
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>ubuntu14,ubuntu16</osFamily>
+          <packages>
+            <package>
+              <name>zookeeper-${stack_version}</name>
+            </package>
+            <package>
+              <name>zookeeper-${stack_version}-server</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
     </service>
   </services>
 </metainfo>


[13/50] ambari git commit: AMBARI-18841 : Grafana fails to start (Commit 2) (avijayan)

Posted by sw...@apache.org.
AMBARI-18841 : Grafana fails to start (Commit 2) (avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1b63b909
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1b63b909
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1b63b909

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 1b63b9094f28b04fa361d8ad3c7ab122716d544d
Parents: 98e41c6
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Mon Nov 28 10:06:32 2016 -0800
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Mon Nov 28 10:06:32 2016 -0800

----------------------------------------------------------------------
 .../conf/unix/ambari-metrics-grafana            | 34 ++++++--------------
 1 file changed, 9 insertions(+), 25 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1b63b909/ambari-metrics/ambari-metrics-grafana/conf/unix/ambari-metrics-grafana
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-grafana/conf/unix/ambari-metrics-grafana b/ambari-metrics/ambari-metrics-grafana/conf/unix/ambari-metrics-grafana
index eeb189b..472942e 100644
--- a/ambari-metrics/ambari-metrics-grafana/conf/unix/ambari-metrics-grafana
+++ b/ambari-metrics/ambari-metrics-grafana/conf/unix/ambari-metrics-grafana
@@ -94,30 +94,14 @@ function isRunning() {
 
 case "$1" in
   start)
-    echo -n $"Starting $DESC: .... "
+    echo $"Starting $DESC: .... " >> $LOG_FILE
 
     isRunning
     if [ $? -eq 0 ]; then
-      echo "Already running."
+      echo "Already running." >> $LOG_FILE
       exit 0
     fi
 
-    echo "$(date) Checking for previously running Grafana with out of sync PID..." >> $LOG_FILE
-    if [ -n "`ps ax | grep -w ambari-metrics-grafana/bin/grafana-server`" ]; then
-      PID=`ps aux | grep -w ambari-metrics-grafana/bin/grafana-server | head -n 1 | awk '{print $2}'`
-      echo "$(date) Grafana Server already running with PID: ${PID}. Killing that instance"  >> $LOG_FILE
-      kill "${PID}" >/dev/null 2>&1
-      sleep "${STOP_TIMEOUT}"
-
-      if kill -0 "${PID}" > /dev/null 2>&1; then
-        echo "$(date) WARNING: Grafana Server did not stop gracefully. Trying to kill with kill -9" >> $LOG_FILE
-        kill -9 "${PID}" >/dev/null 2>&1
-      fi
-
-    else
-      echo "$(date) No out of sync Grafana server process found" >> $LOG_FILE
-    fi
-
     # Prepare environment
     # mkdir -p "$LOG_DIR" "$DATA_DIR" && chown "$GRAFANA_USER":"$GRAFANA_GROUP" "$LOG_DIR" "$DATA_DIR"
     # touch "$PID_FILE" && chown "$GRAFANA_USER":"$GRAFANA_GROUP" "$PID_FILE"
@@ -135,7 +119,7 @@ case "$1" in
       sleep 1
       # check if pid file has been written two
       if ! [[ -s $PID_FILE ]]; then
-        echo "FAILED"
+        echo "Start FAILED" >> $LOG_FILE
         exit 1
       fi
       i=0
@@ -152,11 +136,11 @@ case "$1" in
       done
     fi
 
-    echo "OK"
+    echo "OK" >> $LOG_FILE
     exit $return
     ;;
   stop)
-    echo -n "Stopping $DESC ..."
+    echo -n "Stopping $DESC ..." >> $LOG_FILE
 
     if [ -f "$PID_FILE" ]; then
       pid=$(cat "$PID_FILE")
@@ -165,18 +149,18 @@ case "$1" in
       sleep "${STOP_TIMEOUT}"
 
       if kill -0 "${pid}" > /dev/null 2>&1; then
-        echo "WARNING: $DESC did not stop gracefully after ${STOP_TIMEOUT} seconds: Trying to kill with kill -9"
+        echo "WARNING: $DESC did not stop gracefully after ${STOP_TIMEOUT} seconds: Trying to kill with kill -9" >> $LOG_FILE
         kill -9 "${pid}" >/dev/null 2>&1
       fi
 
       if ps -p "${pid}" > /dev/null 2>&1; then
-        echo "ERROR: Unable to kill ${pid}"
+        echo "ERROR: Unable to kill ${pid}" >> $LOG_FILE
       else
-        rm -f "${pidfile}" >/dev/null 2>&1
+        rm -f "$PID_FILE" >/dev/null 2>&1
       fi
       echo "OK"
     else
-      echo -n "(not running)"
+      echo -n "(not running)" >> $LOG_FILE
     fi
     exit 0
     ;;
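
The stop branch above follows the usual signal/wait/escalate pattern and, as
part of this fix, removes $PID_FILE instead of the undefined ${pidfile}. A
hedged Python equivalent of that pattern (not the init script itself):

    import errno
    import os
    import signal
    import time

    def stop_process(pid_file, stop_timeout=5):
        # SIGTERM first, escalate to SIGKILL after the timeout, then clean up
        # the pid file.
        with open(pid_file) as f:
            pid = int(f.read().strip())
        try:
            os.kill(pid, signal.SIGTERM)
            time.sleep(stop_timeout)
            os.kill(pid, signal.SIGKILL)   # still alive after the grace period
        except OSError as e:
            if e.errno != errno.ESRCH:     # ESRCH: process already exited
                raise
        os.remove(pid_file)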


[38/50] ambari git commit: AMBARI-18962:Make check_package_condition in script.py overridable by services in stacks (dili)

Posted by sw...@apache.org.
AMBARI-18962:Make check_package_condition in script.py overridable by services in stacks (dili)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/841a064d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/841a064d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/841a064d

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 841a064d47410184e8bc5101660aa957ac78e268
Parents: eec985b
Author: Di Li <di...@apache.org>
Authored: Wed Nov 30 11:13:19 2016 -0500
Committer: Di Li <di...@apache.org>
Committed: Wed Nov 30 11:22:15 2016 -0500

----------------------------------------------------------------------
 .../resource_management/libraries/script/script.py   | 15 +++++++++------
 .../custom_actions/scripts/install_packages.py       |  2 +-
 2 files changed, 10 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/841a064d/ambari-common/src/main/python/resource_management/libraries/script/script.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py
index 99970ca..5375a53 100644
--- a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@ -555,7 +555,7 @@ class Script(object):
       if isinstance(package_list_str, basestring) and len(package_list_str) > 0:
         package_list = json.loads(package_list_str)
         for package in package_list:
-          if Script.check_package_condition(package):
+          if self.check_package_condition(package):
             name = self.format_package_name(package['name'])
             # HACK: On Windows, only install ambari-metrics packages using Choco Package Installer
             # TODO: Update this once choco packages for hadoop are created. This is because, service metainfo.xml support
@@ -579,22 +579,25 @@ class Script(object):
                           str(config['hostLevelParams']['stack_version']))
       reload_windows_env()
       
-  @staticmethod
-  def check_package_condition(package):
-    from resource_management.libraries.functions import package_conditions
+  def check_package_condition(self, package):
     condition = package['condition']
-    name = package['name']
     
     if not condition:
       return True
     
+    return self.should_install_package(package)
+
+  def should_install_package(self, package):
+    from resource_management.libraries.functions import package_conditions
+    condition = package['condition']
     try:
       chooser_method = getattr(package_conditions, condition)
     except AttributeError:
+      name = package['name']
       raise Fail("Condition with name '{0}', when installing package {1}. Please check package_conditions.py.".format(condition, name))
 
     return chooser_method()
-      
+
   @staticmethod
   def matches_any_regexp(string, regexp_list):
     for regex in regexp_list:

http://git-wip-us.apache.org/repos/asf/ambari/blob/841a064d/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py b/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
index 0e31310..112abe3 100644
--- a/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
+++ b/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
@@ -460,7 +460,7 @@ class InstallPackages(Script):
     """
     filtered_package_list = []
     for package in package_list:
-      if Script.check_package_condition(package):
+      if self.check_package_condition(package):
         filtered_package_list.append(package)
     return filtered_package_list
 


[14/50] ambari git commit: AMBARI-18992: Stack version input text field on Register Version page should align properly even when resizing the page (dili)

Posted by sw...@apache.org.
AMBARI-18992: Stack version input text field on Register Version page should align properly even when resizing the page (dili)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/17d2ab1f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/17d2ab1f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/17d2ab1f

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 17d2ab1f9b0086d12b6be49167606e43741ae702
Parents: 1b63b90
Author: Di Li <di...@apache.org>
Authored: Mon Nov 28 15:09:54 2016 -0500
Committer: Di Li <di...@apache.org>
Committed: Mon Nov 28 15:16:47 2016 -0500

----------------------------------------------------------------------
 .../ui/admin-web/app/views/stackVersions/stackVersionPage.html     | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/17d2ab1f/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
index a614a0d..b31e2c5 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/stackVersions/stackVersionPage.html
@@ -83,7 +83,7 @@
                 </li>
               </ul>
             </div>
-            <div class="col-sm-7 pull-right form-inline repo-version-inline" ng-if="activeStackVersion.isNonXMLdata">
+            <div class="pull-right form-inline repo-version-inline" ng-if="activeStackVersion.isNonXMLdata">
               <label class="control-label col-sm-1 repo-version-label">Name:</label>
               <div class="col-sm-11">
                 <span class="control-label">


[02/50] ambari git commit: AMBARI-18986. Deployment failure when command does not have role (smohanty)

Posted by sw...@apache.org.
AMBARI-18986. Deployment failure when command does not have role (smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7afe172d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7afe172d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7afe172d

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 7afe172d20c88abe6b51110364c5200ae1e29618
Parents: 92823a7
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Thu Nov 24 10:15:30 2016 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Thu Nov 24 10:15:30 2016 -0800

----------------------------------------------------------------------
 .../main/python/ambari_agent/CustomServiceOrchestrator.py    | 8 ++++++--
 .../python/ambari_agent/TestCustomServiceOrchestrator.py     | 4 ++--
 2 files changed, 8 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7afe172d/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
index b814334..770484d 100644
--- a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
+++ b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
@@ -42,6 +42,7 @@ class CustomServiceOrchestrator():
   """
 
   SCRIPT_TYPE_PYTHON = "PYTHON"
+  COMMAND_TYPE = "commandType"
   COMMAND_NAME_STATUS = "STATUS"
   COMMAND_NAME_SECURITY_STATUS = "SECURITY_STATUS"
   CUSTOM_ACTION_COMMAND = 'ACTIONEXECUTE'
@@ -58,7 +59,6 @@ class CustomServiceOrchestrator():
   AMBARI_SERVER_HOST = "ambari_server_host"
   AMBARI_SERVER_PORT = "ambari_server_port"
   AMBARI_SERVER_USE_SSL = "ambari_server_use_ssl"
-  METRICS_GRAFANA = "METRICS_GRAFANA"
 
   FREQUENT_COMMANDS = [COMMAND_NAME_SECURITY_STATUS, COMMAND_NAME_STATUS]
   DONT_DEBUG_FAILURES_FOR_COMMANDS = FREQUENT_COMMANDS
@@ -154,7 +154,11 @@ class CustomServiceOrchestrator():
         self.file_cache.get_host_scripts_base_dir(server_url_prefix)          
         hook_dir = self.file_cache.get_hook_base_dir(command, server_url_prefix)
         base_dir = self.file_cache.get_service_base_dir(command, server_url_prefix)
-        if command['role'] == self.METRICS_GRAFANA:
+        from ActionQueue import ActionQueue  # To avoid cyclic dependency
+        if self.COMMAND_TYPE in command and command[self.COMMAND_TYPE] == ActionQueue.EXECUTION_COMMAND:
+          logger.info("Found it - " + str(command[self.COMMAND_TYPE]) + " yeah")
+          # limiting to only EXECUTION_COMMANDs for now
+          # TODO need a design for limiting to specific role/component such as METRICS_GRAFANA
           self.file_cache.get_dashboard_base_dir(server_url_prefix)
 
         script_path = self.resolve_script_path(base_dir, script)

http://git-wip-us.apache.org/repos/asf/ambari/blob/7afe172d/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py b/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
index d0d6ac4..2be6e1a 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
@@ -239,6 +239,7 @@ class TestCustomServiceOrchestrator(TestCase):
     
     FileCache_mock.return_value = None
     command = {
+      'commandType' : 'EXECUTION_COMMAND',
       'role' : 'REGION_SERVER',
       'hostLevelParams' : {
         'stack_name' : 'HDP',
@@ -277,8 +278,7 @@ class TestCustomServiceOrchestrator(TestCase):
     self.assertEqual(ret['exitcode'], 0)
     self.assertTrue(run_file_mock.called)
     self.assertEqual(run_file_mock.call_count, 3)
-    # Should only be called for METRICS_GRAFANA
-    self.assertFalse(get_dashboard_base_dir_mock.called)
+    self.assertTrue(get_dashboard_base_dir_mock.called)
 
     run_file_mock.reset_mock()
 


[23/50] ambari git commit: AMBARI-19016 - RegionServer Restart During Upgrade Fails Because of Missing Import When Formatting PID (jonathanhurley)

Posted by sw...@apache.org.
AMBARI-19016 - RegionServer Restart During Upgrade Fails Because of Missing Import When Formatting PID (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6f4055fa
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6f4055fa
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6f4055fa

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 6f4055fa11d956113d7d6b95db762e4806e5dd81
Parents: 04c6b12
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Tue Nov 29 08:40:58 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Tue Nov 29 11:21:11 2016 -0500

----------------------------------------------------------------------
 .../common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py   | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6f4055fa/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
index afdc244..43c7ff4 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
@@ -29,8 +29,10 @@ from resource_management.libraries.functions import conf_select, stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.decorator import retry
+from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions import check_process_status
 
+
 def prestart(env, stack_component):
   import params
 
@@ -63,7 +65,6 @@ def is_regionserver_registered(cmd, user, hostname, regex_search_flags):
   Queries HBase through the HBase shell to see which servers have successfully registered. This is
   useful in cases, such as upgrades, where we must ensure that a RegionServer has not only started,
   but also completed its registration handshake before moving into upgrading the next RegionServer.
-
   The hbase shell is used along with the "show 'simple'" command in order to determine if the
   specified host has registered.
   :param cmd:


[15/50] ambari git commit: AMBARI-18987 A general pre-upgrade check on whether services that cannot be upgraded are installed (dili)

Posted by sw...@apache.org.
AMBARI-18987 A general pre-upgrade check on whether services that cannot be upgraded are installed (dili)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/68a881ef
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/68a881ef
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/68a881ef

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 68a881ef1d713c0764c8546eff2007c4630a933e
Parents: 17d2ab1
Author: Di Li <di...@apache.org>
Authored: Mon Nov 28 15:51:11 2016 -0500
Committer: Di Li <di...@apache.org>
Committed: Mon Nov 28 16:03:02 2016 -0500

----------------------------------------------------------------------
 .../ambari/server/checks/CheckDescription.java  |  13 ++
 .../server/checks/ServicePresenceCheck.java     | 177 +++++++++++++++
 .../server/checks/ServicePresenceCheckTest.java | 217 +++++++++++++++++++
 3 files changed, 407 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/68a881ef/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
index fbc4be1..7f24bf4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
@@ -277,6 +277,19 @@ public class CheckDescription {
           "This service does not support upgrades and must be removed before the upgrade can continue. " +
           "After upgrading, Atlas can be reinstalled").build());
 
+  public static CheckDescription SERVICE_PRESENCE_CHECK = new CheckDescription("SERVICE_PRESENCE_CHECK",
+      PrereqCheckType.SERVICE,
+      "Service Is Not Supported For Upgrades",
+      new ImmutableMap.Builder<String, String>()
+        .put(AbstractCheckDescriptor.DEFAULT,
+            "The %s service is currently installed on the cluster. " +
+            "This service does not support upgrades and must be removed before the upgrade can continue. " +
+            "After upgrading, %s can be reinstalled")
+        .put(ServicePresenceCheck.KEY_SERVICE_REMOVED,
+            "The %s service is currently installed on the cluster. " +
+            "This service is removed from the new release and must be removed before the upgrade can continue. " +
+            "After upgrading, %s can be installed").build());
+
   public static CheckDescription RANGER_SERVICE_AUDIT_DB_CHECK = new CheckDescription("RANGER_SERVICE_AUDIT_DB_CHECK",
     PrereqCheckType.SERVICE,
     "Remove the Ranger Audit to Database Capability",

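
For reference, the two %s placeholders in the new description templates above are filled in by ServicePresenceCheck (added below) when it formats a failure message: first with the offending service, then with the service that can be installed or reinstalled after the upgrade. A minimal, illustrative sketch of that resolution, using hypothetical service names, is:

  // Illustrative sketch only: how a %s-based description template becomes a concrete fail reason.
  public class FailReasonSketch {
    public static void main(String[] args) {
      String template = "The %s service is currently installed on the cluster. "
          + "This service is removed from the new release and must be removed before the upgrade can continue. "
          + "After upgrading, %s can be installed";
      // First %s -> the removed service, second %s -> its replacement (hypothetical names).
      System.out.println(String.format(template, "Storm", "Storm2"));
    }
  }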
http://git-wip-us.apache.org/repos/asf/ambari/blob/68a881ef/ambari-server/src/main/java/org/apache/ambari/server/checks/ServicePresenceCheck.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/ServicePresenceCheck.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/ServicePresenceCheck.java
new file mode 100644
index 0000000..0f4eeb1
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/ServicePresenceCheck.java
@@ -0,0 +1,177 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.checks;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.controller.PrereqCheckRequest;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.stack.PrereqCheckStatus;
+import org.apache.ambari.server.state.stack.PrerequisiteCheck;
+import org.apache.ambari.server.state.stack.UpgradePack.PrerequisiteCheckConfig;
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.inject.Singleton;
+
+/**
+ * Checks whether services that cannot be upgraded are present on the cluster;
+ * for example, an upgrade to stack HDP 2.5 cannot proceed while Atlas is still installed.
+ */
+@Singleton
+@UpgradeCheck(group = UpgradeCheckGroup.DEFAULT)
+public class ServicePresenceCheck extends AbstractCheckDescriptor{
+
+  private static final Logger LOG = LoggerFactory.getLogger(ServicePresenceCheck.class);
+
+  static final String KEY_SERVICE_REMOVED = "service_removed";
+  /*
+   * List of services that do not support upgrade;
+   * these services must be removed before the stack upgrade.
+   */
+  static final String NO_UPGRADE_SUPPORT_SERVICES_PROPERTY_NAME = "no-upgrade-support-service-names";
+
+  /*
+   * List of services removed in the new release.
+   */
+  static final String REMOVED_SERVICES_PROPERTY_NAME = "removed-service-names";
+
+  /*
+   * Replacement service names in the new release, such as Spark to Spark2.
+   */
+  static final String NEW_SERVICES_PROPERTY_NAME = "new-service-names";
+
+  public ServicePresenceCheck(){
+    super(CheckDescription.SERVICE_PRESENCE_CHECK);
+  }
+
+  @Override
+  public void perform(PrerequisiteCheck prerequisiteCheck, PrereqCheckRequest request) throws AmbariException {
+    final Cluster cluster = clustersProvider.get().getCluster(request.getClusterName());
+    Set<String> installedServices = cluster.getServices().keySet();
+
+    List<String> noUpgradeSupportServices = getNoUpgradeSupportServices(request);
+    Map<String, String> removedServices = getRemovedServices(request);
+    List<String> failReasons = new ArrayList<>();
+
+    if(null != noUpgradeSupportServices && !noUpgradeSupportServices.isEmpty()){
+      String reason = getFailReason(prerequisiteCheck, request);
+      for(String service: noUpgradeSupportServices){
+        if (installedServices.contains(service.toUpperCase())){
+          prerequisiteCheck.getFailedOn().add(service);
+          String msg = String.format(reason, service, service);
+          failReasons.add(msg);
+        }
+      }
+    }
+    if(null != removedServices){
+      String reason = getFailReason(KEY_SERVICE_REMOVED, prerequisiteCheck, request);
+      for (Map.Entry<String, String> entry : removedServices.entrySet()) {
+        String removedService = entry.getKey();
+        if(installedServices.contains(removedService.toUpperCase())){
+          prerequisiteCheck.getFailedOn().add(removedService);
+          String newService = entry.getValue();
+          String msg = String.format(reason, removedService, newService);
+          failReasons.add(msg);
+        }
+      }
+    }
+    if(!failReasons.isEmpty()){
+      prerequisiteCheck.setStatus(PrereqCheckStatus.FAIL);
+      prerequisiteCheck.setFailReason(StringUtils.join(failReasons, '\n'));
+    }
+  }
+
+  /**
+   * Obtains a property value specified in the upgrade XML.
+   * @return the property value, or {@code null} if it is not defined
+   */
+  private String getPropertyValue(PrereqCheckRequest request, String propertyKey){
+    String value = null;
+    PrerequisiteCheckConfig prerequisiteCheckConfig = request.getPrerequisiteCheckConfig();
+    Map<String, String> checkProperties = null;
+    if(prerequisiteCheckConfig != null) {
+      checkProperties = prerequisiteCheckConfig.getCheckProperties(this.getClass().getName());
+    }
+    if(checkProperties != null && checkProperties.containsKey(propertyKey)) {
+      value = checkProperties.get(propertyKey);
+    }
+    return value;
+  }
+
+  /**
+   * @return names of services that do not support upgrade, or {@code null} if none are specified
+   */
+  private List<String> getNoUpgradeSupportServices(PrereqCheckRequest request){
+    String value = getPropertyValue(request, NO_UPGRADE_SUPPORT_SERVICES_PROPERTY_NAME);
+    if (null != value){
+      String[] services = value.split(",");
+      List<String> result = new ArrayList<String>();
+      for(String service: services){
+        service = service.trim();
+        if (!service.isEmpty()){
+          result.add(service);
+        }
+      }
+      return result;
+    } else {
+      return null;
+    }
+  }
+
+  /**
+   * @return a map of removed service names to their replacement service names, or {@code null}
+   */
+  private Map<String, String> getRemovedServices(PrereqCheckRequest request) throws AmbariException{
+    String value = getPropertyValue(request, REMOVED_SERVICES_PROPERTY_NAME);
+    String newValue = getPropertyValue(request, NEW_SERVICES_PROPERTY_NAME);
+    if(value == null && newValue == null){
+      return null; //no need to check removed services as they are not specified in the upgrade xml file.
+    } else {
+      if (value == null || newValue == null){
+        throw new AmbariException("Removed services must be paired with new services list.");
+      } else {
+        List<String> oldServices = Arrays.asList(value.split(","));
+        List<String> newServices = Arrays.asList(newValue.split(","));
+        if (oldServices.size() != newServices.size()){
+          throw new AmbariException("Removed services must be paired with new services list.");
+        } else {
+          Map<String, String> result = new LinkedHashMap<String, String>();
+          for (int i = 0; i < oldServices.size(); i++){
+            String oldService = oldServices.get(i).trim();
+            String newService = newServices.get(i).trim();
+            if (oldService.isEmpty() || newService.isEmpty()) {
+              throw new AmbariException("Removed services must be paired with new services list.");
+            } else {
+              result.put(oldService, newService);
+            }
+          }
+          return result;
+        }
+      }
+    }
+  }
+}
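
As a usage note, the removed-service-names and new-service-names check properties are read from the upgrade pack as parallel comma-separated lists and matched positionally by getRemovedServices(). A minimal, illustrative sketch of that pairing, with example property values, is:

  import java.util.LinkedHashMap;
  import java.util.Map;

  // Illustrative sketch only: positional pairing of the two comma-separated properties.
  public class ServicePairingSketch {
    public static void main(String[] args) {
      String removed = "Storm, Ranger";        // example "removed-service-names" value
      String replacements = "Storm2, Ranger2"; // example "new-service-names" value

      String[] oldNames = removed.split(",");
      String[] newNames = replacements.split(",");
      Map<String, String> pairs = new LinkedHashMap<>();
      for (int i = 0; i < oldNames.length; i++) {
        pairs.put(oldNames[i].trim(), newNames[i].trim());
      }
      System.out.println(pairs); // {Storm=Storm2, Ranger=Ranger2}
      // The real check raises an AmbariException when the two lists differ in length.
    }
  }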

http://git-wip-us.apache.org/repos/asf/ambari/blob/68a881ef/ambari-server/src/test/java/org/apache/ambari/server/checks/ServicePresenceCheckTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/checks/ServicePresenceCheckTest.java b/ambari-server/src/test/java/org/apache/ambari/server/checks/ServicePresenceCheckTest.java
new file mode 100644
index 0000000..03b0e81
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/checks/ServicePresenceCheckTest.java
@@ -0,0 +1,217 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.checks;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.ambari.server.controller.PrereqCheckRequest;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.stack.PrereqCheckStatus;
+import org.apache.ambari.server.state.stack.PrerequisiteCheck;
+import org.apache.ambari.server.state.stack.UpgradePack.PrerequisiteCheckConfig;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import com.google.inject.Provider;
+
+/**
+ * Tests for {@link ServicePresenceCheck}
+ */
+public class ServicePresenceCheckTest {
+  private final Clusters m_clusters = Mockito.mock(Clusters.class);
+
+  private final ServicePresenceCheck m_check = new ServicePresenceCheck();
+
+  /**
+   *
+   */
+  @Before
+  public void setup() {
+    m_check.clustersProvider = new Provider<Clusters>() {
+
+      @Override
+      public Clusters get() {
+        return m_clusters;
+      }
+    };
+  }
+
+  @Test
+  public void testPerformPass() throws Exception {
+    final Cluster cluster = Mockito.mock(Cluster.class);
+    Mockito.when(cluster.getClusterId()).thenReturn(1L);
+    Mockito.when(m_clusters.getCluster("cluster")).thenReturn(cluster);
+
+    Map<String, String> checkProperties = new HashMap<String, String>();
+    checkProperties.put(ServicePresenceCheck.NO_UPGRADE_SUPPORT_SERVICES_PROPERTY_NAME,"MyServiceOne, MyServiceTwo");
+    checkProperties.put(ServicePresenceCheck.REMOVED_SERVICES_PROPERTY_NAME,"OldServiceOne, OldServiceTwo");
+    checkProperties.put(ServicePresenceCheck.NEW_SERVICES_PROPERTY_NAME,"NewServiceOne, NewServiceTwo");
+
+    PrerequisiteCheckConfig prerequisiteCheckConfig = Mockito.mock(PrerequisiteCheckConfig.class);
+    Mockito.when(prerequisiteCheckConfig.getCheckProperties(
+        m_check.getClass().getName())).thenReturn(checkProperties);
+
+    PrerequisiteCheck check = new PrerequisiteCheck(null, null);
+    PrereqCheckRequest request = new PrereqCheckRequest("cluster");
+    request.setRepositoryVersion("2.5.0.0");
+    request.setPrerequisiteCheckConfig(prerequisiteCheckConfig);
+
+    m_check.perform(check, request);
+    Assert.assertEquals(PrereqCheckStatus.PASS, check.getStatus());
+  }
+
+  @Test
+  public void testPerformHasNoUpgradeSupportServices() throws Exception {
+    final Cluster cluster = Mockito.mock(Cluster.class);
+    Mockito.when(cluster.getClusterId()).thenReturn(1L);
+    Mockito.when(m_clusters.getCluster("cluster")).thenReturn(cluster);
+
+    Map<String, Service> services = new HashMap<String, Service>();
+    services.put("ATLAS", Mockito.mock(Service.class));
+    Mockito.when(cluster.getServices()).thenReturn(services);
+
+    Map<String, String> checkProperties = new HashMap<String, String>();
+    checkProperties.put(ServicePresenceCheck.NO_UPGRADE_SUPPORT_SERVICES_PROPERTY_NAME,"Atlas, MyService");
+    PrerequisiteCheckConfig prerequisiteCheckConfig = Mockito.mock(PrerequisiteCheckConfig.class);
+    Mockito.when(prerequisiteCheckConfig.getCheckProperties(
+        m_check.getClass().getName())).thenReturn(checkProperties);
+
+    PrerequisiteCheck check = new PrerequisiteCheck(null, null);
+    PrereqCheckRequest request = new PrereqCheckRequest("cluster");
+    request.setPrerequisiteCheckConfig(prerequisiteCheckConfig);
+
+    m_check.perform(check, request);
+    Assert.assertEquals(PrereqCheckStatus.FAIL, check.getStatus());
+  }
+
+  @Test
+  public void testPerformHasRemovedServices() throws Exception {
+    final Cluster cluster = Mockito.mock(Cluster.class);
+    Mockito.when(cluster.getClusterId()).thenReturn(1L);
+    Mockito.when(m_clusters.getCluster("cluster")).thenReturn(cluster);
+
+    Map<String, Service> services = new HashMap<String, Service>();
+    services.put("ATLAS", Mockito.mock(Service.class));
+    services.put("OLDSERVICE", Mockito.mock(Service.class));
+    Mockito.when(cluster.getServices()).thenReturn(services);
+
+    Map<String, String> checkProperties = new HashMap<String, String>();
+    checkProperties.put(ServicePresenceCheck.REMOVED_SERVICES_PROPERTY_NAME,"Atlas, OldService");
+    checkProperties.put(ServicePresenceCheck.NEW_SERVICES_PROPERTY_NAME,"Atlas2, NewService");
+
+    PrerequisiteCheckConfig prerequisiteCheckConfig = Mockito.mock(PrerequisiteCheckConfig.class);
+    Mockito.when(prerequisiteCheckConfig.getCheckProperties(
+        m_check.getClass().getName())).thenReturn(checkProperties);
+
+    PrerequisiteCheck check = new PrerequisiteCheck(null, null);
+    PrereqCheckRequest request = new PrereqCheckRequest("cluster");
+    request.setPrerequisiteCheckConfig(prerequisiteCheckConfig);
+
+    m_check.perform(check, request);
+    Assert.assertEquals(PrereqCheckStatus.FAIL, check.getStatus());
+  }
+
+  @Test
+  public void testPerformMixOne() throws Exception {
+    final Cluster cluster = Mockito.mock(Cluster.class);
+    Mockito.when(cluster.getClusterId()).thenReturn(1L);
+    Mockito.when(m_clusters.getCluster("cluster")).thenReturn(cluster);
+
+    Map<String, Service> services = new HashMap<String, Service>();
+    services.put("ATLAS", Mockito.mock(Service.class));
+    Mockito.when(cluster.getServices()).thenReturn(services);
+
+    Map<String, String> checkProperties = new HashMap<String, String>();
+    checkProperties.put(ServicePresenceCheck.NO_UPGRADE_SUPPORT_SERVICES_PROPERTY_NAME,"MyServiceOne, MyServiceTwo");
+    checkProperties.put(ServicePresenceCheck.REMOVED_SERVICES_PROPERTY_NAME,"Atlas, OldService");
+    checkProperties.put(ServicePresenceCheck.NEW_SERVICES_PROPERTY_NAME,"Atlas2, NewService");
+
+    PrerequisiteCheckConfig prerequisiteCheckConfig = Mockito.mock(PrerequisiteCheckConfig.class);
+    Mockito.when(prerequisiteCheckConfig.getCheckProperties(
+        m_check.getClass().getName())).thenReturn(checkProperties);
+
+    PrerequisiteCheck check = new PrerequisiteCheck(null, null);
+    PrereqCheckRequest request = new PrereqCheckRequest("cluster");
+    request.setPrerequisiteCheckConfig(prerequisiteCheckConfig);
+
+    m_check.perform(check, request);
+    Assert.assertEquals(PrereqCheckStatus.FAIL, check.getStatus());
+  }
+
+  @Test
+  public void testPerformMixTwo() throws Exception {
+    final Cluster cluster = Mockito.mock(Cluster.class);
+    Mockito.when(cluster.getClusterId()).thenReturn(1L);
+    Mockito.when(m_clusters.getCluster("cluster")).thenReturn(cluster);
+
+    Map<String, Service> services = new HashMap<String, Service>();
+    services.put("OLDSERVICE", Mockito.mock(Service.class));
+    Mockito.when(cluster.getServices()).thenReturn(services);
+
+    Map<String, String> checkProperties = new HashMap<String, String>();
+    checkProperties.put(ServicePresenceCheck.NO_UPGRADE_SUPPORT_SERVICES_PROPERTY_NAME,"Atlas, MyService");
+    checkProperties.put(ServicePresenceCheck.REMOVED_SERVICES_PROPERTY_NAME,"OldService");
+    checkProperties.put(ServicePresenceCheck.NEW_SERVICES_PROPERTY_NAME,"NewService");
+
+    PrerequisiteCheckConfig prerequisiteCheckConfig = Mockito.mock(PrerequisiteCheckConfig.class);
+    Mockito.when(prerequisiteCheckConfig.getCheckProperties(
+        m_check.getClass().getName())).thenReturn(checkProperties);
+
+    PrerequisiteCheck check = new PrerequisiteCheck(null, null);
+    PrereqCheckRequest request = new PrereqCheckRequest("cluster");
+    request.setPrerequisiteCheckConfig(prerequisiteCheckConfig);
+
+    m_check.perform(check, request);
+    Assert.assertEquals(PrereqCheckStatus.FAIL, check.getStatus());
+  }
+
+  @Test
+  public void testPerformMixThree() throws Exception {
+    final Cluster cluster = Mockito.mock(Cluster.class);
+    Mockito.when(cluster.getClusterId()).thenReturn(1L);
+    Mockito.when(m_clusters.getCluster("cluster")).thenReturn(cluster);
+
+    Map<String, Service> services = new HashMap<String, Service>();
+    services.put("ATLAS", Mockito.mock(Service.class));
+    services.put("HDFS", Mockito.mock(Service.class));
+    services.put("STORM", Mockito.mock(Service.class));
+    services.put("RANGER", Mockito.mock(Service.class));
+    Mockito.when(cluster.getServices()).thenReturn(services);
+
+    Map<String, String> checkProperties = new HashMap<String, String>();
+    checkProperties.put(ServicePresenceCheck.NO_UPGRADE_SUPPORT_SERVICES_PROPERTY_NAME,"Atlas, HDFS");
+    checkProperties.put(ServicePresenceCheck.REMOVED_SERVICES_PROPERTY_NAME,"Storm, Ranger");
+    checkProperties.put(ServicePresenceCheck.NEW_SERVICES_PROPERTY_NAME,"Storm2, Ranger2");
+
+    PrerequisiteCheckConfig prerequisiteCheckConfig = Mockito.mock(PrerequisiteCheckConfig.class);
+    Mockito.when(prerequisiteCheckConfig.getCheckProperties(
+        m_check.getClass().getName())).thenReturn(checkProperties);
+
+    PrerequisiteCheck check = new PrerequisiteCheck(null, null);
+    PrereqCheckRequest request = new PrereqCheckRequest("cluster");
+    request.setPrerequisiteCheckConfig(prerequisiteCheckConfig);
+
+    m_check.perform(check, request);
+    Assert.assertEquals(PrereqCheckStatus.FAIL, check.getStatus());
+  }
+}


[22/50] ambari git commit: AMBARI-19009. Remove Spark History Server dependency from Zeppelin (magyari_sandor)

Posted by sw...@apache.org.
AMBARI-19009. Remove Spark History Server dependency from Zeppelin (magyari_sandor)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/04c6b122
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/04c6b122
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/04c6b122

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 04c6b12232179522ccbe7f70e1c69534e3651b5f
Parents: c295941
Author: Sandor Magyari <sm...@hortonworks.com>
Authored: Tue Nov 29 16:33:03 2016 +0100
Committer: Sandor Magyari <sm...@hortonworks.com>
Committed: Tue Nov 29 16:33:03 2016 +0100

----------------------------------------------------------------------
 .../src/main/resources/stacks/HDP/2.5/role_command_order.json      | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/04c6b122/ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json
index 0e1319a..f9207f6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/role_command_order.json
@@ -3,7 +3,7 @@
   "_comment" : "blockedRole-blockedCommand: [blockerRole1-blockerCommand1, blockerRole2-blockerCommand2, ...]",
   "general_deps" : {
     "_comment" : "dependencies for all cases",
-    "ZEPPELIN_MASTER-START" : ["NAMENODE-START", "SPARK_JOBHISTORYSERVER-START"],
+    "ZEPPELIN_MASTER-START" : ["NAMENODE-START"],
     "ZEPPELIN_SERVICE_CHECK-SERVICE_CHECK" : ["ZEPPELIN_MASTER-START"],
     "HIVE_SERVER_INTERACTIVE-START": ["RESOURCEMANAGER-START", "NODEMANAGER-START", "MYSQL_SERVER-START"],
     "RESOURCEMANAGER-STOP": ["HIVE_SERVER_INTERACTIVE-STOP", "SPARK_THRIFTSERVER-STOP", "SPARK2_THRIFTSERVER-STOP"],


[07/50] ambari git commit: AMBARI-18976. Config History request execution time depends on config versions count. (mpapirkovskyy)

Posted by sw...@apache.org.
AMBARI-18976. Config History request execution time depends on config versions count. (mpapirkovskyy)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ce4d4fa7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ce4d4fa7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ce4d4fa7

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: ce4d4fa768375aac631703dbfceac44fd8faa151
Parents: 59d11cf
Author: Myroslav Papirkovskyi <mp...@hortonworks.com>
Authored: Wed Nov 23 19:03:14 2016 +0200
Committer: Myroslav Papirkovskyi <mp...@hortonworks.com>
Committed: Mon Nov 28 18:39:06 2016 +0200

----------------------------------------------------------------------
 .../server/state/cluster/ClusterImpl.java       | 42 +++++---------------
 1 file changed, 10 insertions(+), 32 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ce4d4fa7/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 8b157c7..7bf24ce 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -2534,12 +2534,10 @@ public class ClusterImpl implements Cluster {
     if (serviceConfigEntity.getGroupId() == null) {
       Collection<String> configTypes = serviceConfigTypes.get(serviceName);
       List<ClusterConfigMappingEntity> mappingEntities =
-          clusterDAO.getClusterConfigMappingEntitiesByCluster(getClusterId());
+          clusterDAO.getSelectedConfigMappingByTypes(getClusterId(), new ArrayList<>(configTypes));
       for (ClusterConfigMappingEntity entity : mappingEntities) {
-        if (configTypes.contains(entity.getType()) && entity.isSelected() > 0) {
-          entity.setSelected(0);
-          entity = clusterDAO.mergeConfigMapping(entity);
-        }
+        entity.setSelected(0);
+        clusterDAO.mergeConfigMapping(entity);
       }
 
       for (ClusterConfigEntity configEntity : serviceConfigEntity.getClusterConfigEntities()) {
@@ -2599,14 +2597,12 @@ public class ClusterImpl implements Cluster {
   @Transactional
   void selectConfig(String type, String tag, String user) {
     Collection<ClusterConfigMappingEntity> entities =
-        clusterDAO.getClusterConfigMappingEntitiesByCluster(getClusterId());
+      clusterDAO.getLatestClusterConfigMappingsEntityByType(getClusterId(), type);
 
     //disable previous config
     for (ClusterConfigMappingEntity e : entities) {
-      if (e.isSelected() > 0 && e.getType().equals(type)) {
-        e.setSelected(0);
-        e = clusterDAO.mergeConfigMapping(e);
-      }
+      e.setSelected(0);
+      clusterDAO.mergeConfigMapping(e);
     }
 
     ClusterEntity clusterEntity = getClusterEntity();
@@ -2672,32 +2668,15 @@ public class ClusterImpl implements Cluster {
   }
 
   private List<ClusterConfigEntity> getClusterConfigEntitiesByService(String serviceName) {
-    List<ClusterConfigEntity> configEntities = new ArrayList<ClusterConfigEntity>();
-
-    //add configs from this service
     Collection<String> configTypes = serviceConfigTypes.get(serviceName);
-    for (ClusterConfigMappingEntity mappingEntity : clusterDAO.getClusterConfigMappingEntitiesByCluster(getClusterId())) {
-      if (mappingEntity.isSelected() > 0 && configTypes.contains(mappingEntity.getType())) {
-        ClusterConfigEntity configEntity =
-          clusterDAO.findConfig(getClusterId(), mappingEntity.getType(), mappingEntity.getTag());
-        if (configEntity != null) {
-          configEntities.add(configEntity);
-        } else {
-          LOG.error("Desired cluster config type={}, tag={} is not present in database," +
-            " unable to add to service config version");
-        }
-      }
-    }
-    return configEntities;
+    return clusterDAO.getLatestClusterConfigsByTypes(getClusterId(), new ArrayList<>(configTypes));
   }
 
   @Override
   public Config getDesiredConfigByType(String configType) {
-    for (ClusterConfigMappingEntity e : clusterDAO.getClusterConfigMappingEntitiesByCluster(
-        getClusterId())) {
-      if (e.isSelected() > 0 && e.getType().equals(configType)) {
-        return getConfig(e.getType(), e.getTag());
-      }
+    List<ClusterConfigMappingEntity> entities = clusterDAO.getLatestClusterConfigMappingsEntityByType(getClusterId(), configType);
+    if (!entities.isEmpty()) {
+      return getConfig(configType, entities.get(0).getTag());
     }
 
     return null;
@@ -3454,5 +3433,4 @@ public class ClusterImpl implements Cluster {
 
     m_clusterPropertyCache.clear();
   }
-
 }
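
The ClusterDAO helpers referenced above (getSelectedConfigMappingByTypes, getLatestClusterConfigMappingsEntityByType, getLatestClusterConfigsByTypes) are not part of this diff; the point of the change is to push the "selected" filtering into the database instead of iterating every config mapping in Java. A rough, non-authoritative sketch of the kind of query such a helper runs (the matching named-query JPQL is visible on ClusterConfigMappingEntity in a later diff in this digest) is:

  import java.util.List;
  import javax.persistence.EntityManager;
  import javax.persistence.TypedQuery;
  import org.apache.ambari.server.orm.entities.ClusterConfigMappingEntity;

  // Rough sketch only; the real DAO methods use named queries defined on the JPA entities.
  public class SelectedMappingQuerySketch {
    public static List<ClusterConfigMappingEntity> findSelectedMappingsByType(
        EntityManager em, long clusterId, String configType) {
      TypedQuery<ClusterConfigMappingEntity> query = em.createQuery(
          "SELECT mapping FROM ClusterConfigMappingEntity mapping "
        + "WHERE mapping.clusterId = :clusterId AND mapping.selectedInd > 0 "
        + "AND mapping.typeName = :typeName", ClusterConfigMappingEntity.class);
      query.setParameter("clusterId", clusterId);
      query.setParameter("typeName", configType);
      return query.getResultList();
    }
  }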


[09/50] ambari git commit: Revert "temp scv"

Posted by sw...@apache.org.
Revert "temp scv"

This reverts commit 59d11cf36b9fa283ba9592c8440f6be53debadd0.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1df9b466
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1df9b466
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1df9b466

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 1df9b46615710a2232eb38e975fbcd6f9dec8cb9
Parents: ce4d4fa
Author: Myroslav Papirkovskyi <mp...@hortonworks.com>
Authored: Mon Nov 28 18:41:12 2016 +0200
Committer: Myroslav Papirkovskyi <mp...@hortonworks.com>
Committed: Mon Nov 28 18:41:12 2016 +0200

----------------------------------------------------------------------
 .../internal/ClusterControllerImpl.java         |  2 +-
 .../ServiceConfigVersionResourceProvider.java   |  3 +--
 .../logging/LoggingSearchPropertyProvider.java  | 11 ++-------
 .../ambari/server/orm/dao/ClusterDAO.java       | 24 --------------------
 .../ambari/server/orm/dao/ServiceConfigDAO.java | 13 ++++++-----
 .../orm/entities/ClusterConfigEntity.java       |  7 +-----
 .../entities/ClusterConfigMappingEntity.java    |  6 -----
 .../orm/entities/ServiceConfigEntity.java       |  5 ++--
 8 files changed, 14 insertions(+), 57 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1df9b466/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
index c752e80..32bed7b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterControllerImpl.java
@@ -858,7 +858,7 @@ public class ClusterControllerImpl implements ClusterController {
       if (compVal == 0) {
         Schema schema = getSchema(resourceType);
 
-        for (Type type : schema.getKeyTypes()) {
+        for (Type type : Type.values()) {
           String keyPropertyId = schema.getKeyPropertyId(type);
           if (keyPropertyId != null) {
             compVal = compareValues(resource1.getPropertyValue(keyPropertyId),

http://git-wip-us.apache.org/repos/asf/ambari/blob/1df9b466/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java
index e5ca389..2edbe9b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceConfigVersionResourceProvider.java
@@ -91,9 +91,8 @@ public class ServiceConfigVersionResourceProvider extends
     PROPERTY_IDS.add(SERVICE_CONFIG_VERSION_IS_COMPATIBLE_PROPERTY_ID);
 
     // keys
-    KEY_PROPERTY_IDS.put(Resource.Type.Service,SERVICE_CONFIG_VERSION_SERVICE_NAME_PROPERTY_ID);
+    KEY_PROPERTY_IDS.put(Resource.Type.ServiceConfigVersion,SERVICE_CONFIG_VERSION_SERVICE_NAME_PROPERTY_ID);
     KEY_PROPERTY_IDS.put(Resource.Type.Cluster,SERVICE_CONFIG_VERSION_CLUSTER_NAME_PROPERTY_ID);
-    KEY_PROPERTY_IDS.put(Resource.Type.ServiceConfigVersion,SERVICE_CONFIG_VERSION_PROPERTY_ID);
   }
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/1df9b466/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingSearchPropertyProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingSearchPropertyProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingSearchPropertyProvider.java
index 16788ed..d9db290 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingSearchPropertyProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingSearchPropertyProvider.java
@@ -39,10 +39,8 @@ import org.apache.log4j.Logger;
 
 import java.util.Collections;
 import java.util.EnumSet;
-import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -75,7 +73,7 @@ public class LoggingSearchPropertyProvider implements PropertyProvider {
 
   @Override
   public Set<Resource> populateResources(Set<Resource> resources, Request request, Predicate predicate) throws SystemException {
-    Map<String, Boolean> isLogSearchRunning = new HashMap<>();
+
     for (Resource resource : resources) {
       // obtain the required identifying properties on the host component resource
       final String componentName = (String)resource.getPropertyValue(PropertyHelper.getPropertyId("HostRoles", "component_name"));
@@ -92,12 +90,7 @@ public class LoggingSearchPropertyProvider implements PropertyProvider {
         continue;
       }
 
-      Boolean isLogSearchRunningForSpecifiedCluster = isLogSearchRunning.get(clusterName);
-      if (isLogSearchRunningForSpecifiedCluster == null) {
-        isLogSearchRunningForSpecifiedCluster = logSearchServerRunning(clusterName);
-        isLogSearchRunning.put(clusterName, isLogSearchRunningForSpecifiedCluster);
-      }
-      if (!isLogSearchRunningForSpecifiedCluster) {
+      if (!logSearchServerRunning(clusterName)) {
         continue;
       }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/1df9b466/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java
index b727c72..e93ac0e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ClusterDAO.java
@@ -116,18 +116,6 @@ public class ClusterDAO {
   }
 
   @RequiresSession
-  public List<ClusterConfigEntity> getLatestClusterConfigsByTypes(Long clusterId, List<String> types) {
-    TypedQuery<ClusterConfigEntity> query = entityManagerProvider.get().createNamedQuery(
-      "ClusterConfigEntity.findLatestClusterConfigsByTypes",
-      ClusterConfigEntity.class);
-
-    query.setParameter("clusterId", clusterId);
-    query.setParameter("types", types);
-
-    return daoUtils.selectList(query);
-  }
-
-  @RequiresSession
   public ClusterConfigEntity findConfig(Long clusterId, String type, Long version) {
     CriteriaBuilder cb = entityManagerProvider.get().getCriteriaBuilder();
     CriteriaQuery<ClusterConfigEntity> cq = cb.createQuery(ClusterConfigEntity.class);
@@ -252,18 +240,6 @@ public class ClusterDAO {
     return daoUtils.selectList(query);
   }
 
-  @RequiresSession
-  public List<ClusterConfigMappingEntity> getLatestClusterConfigMappingsEntityByType(long clusterId, String configType) {
-    TypedQuery<ClusterConfigMappingEntity> query = entityManagerProvider.get().createNamedQuery(
-      "ClusterConfigMappingEntity.findLatestClusterConfigMappingsByType",
-      ClusterConfigMappingEntity.class);
-
-    query.setParameter("clusterId", clusterId);
-    query.setParameter("typeName", configType);
-
-    return daoUtils.selectList(query);
-  }
-
   /**
    * Gets selected mappings for provided config types
    * @param clusterId cluster id

http://git-wip-us.apache.org/repos/asf/ambari/blob/1df9b466/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
index 212a9f0..2cbee8d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ServiceConfigDAO.java
@@ -115,13 +115,14 @@ public class ServiceConfigDAO {
 
   @RequiresSession
   public List<ServiceConfigEntity> getLastServiceConfigs(Long clusterId) {
-    TypedQuery<ServiceConfigEntity> query = entityManagerProvider.get().createNamedQuery(
-      "ServiceConfigEntity.findLatestServiceConfigsByCluster",
-      ServiceConfigEntity.class);
-
-    query.setParameter("clusterId", clusterId);
+    TypedQuery<ServiceConfigEntity> query = entityManagerProvider.get().
+      createQuery("SELECT scv FROM ServiceConfigEntity scv " +
+        "WHERE scv.clusterId = ?1 AND scv.createTimestamp = (" +
+        "SELECT MAX(scv2.createTimestamp) FROM ServiceConfigEntity scv2 " +
+        "WHERE scv2.serviceName = scv.serviceName AND scv2.clusterId = ?1 AND scv2.groupId IS NULL)",
+        ServiceConfigEntity.class);
 
-    return daoUtils.selectList(query);
+    return daoUtils.selectList(query, clusterId);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/1df9b466/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigEntity.java
index 937e872..bf0faee 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigEntity.java
@@ -55,12 +55,7 @@ import javax.persistence.UniqueConstraint;
     @NamedQuery(name = "ClusterConfigEntity.findClusterConfigMappingsByStack",
       query = "SELECT mapping FROM ClusterConfigMappingEntity mapping " +
         "JOIN ClusterConfigEntity config ON mapping.typeName = config.type AND mapping.tag = config.tag " +
-        "WHERE mapping.clusterId = :clusterId AND config.stack = :stack"),
-    @NamedQuery(name = "ClusterConfigEntity.findLatestClusterConfigsByTypes",
-      query = "SELECT cc FROM ClusterConfigEntity cc " +
-        "JOIN ClusterConfigMappingEntity ccm " +
-        "ON cc.clusterId = ccm.clusterId AND cc.type = ccm.typeName AND cc.tag = ccm.tag " +
-        "WHERE cc.clusterId = :clusterId AND ccm.selectedInd > 0 AND ccm.typeName IN :types")
+        "WHERE mapping.clusterId = :clusterId AND config.stack = :stack")
 })
 
 public class ClusterConfigEntity {

http://git-wip-us.apache.org/repos/asf/ambari/blob/1df9b466/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigMappingEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigMappingEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigMappingEntity.java
index 04c6030..c3c3e9e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigMappingEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigMappingEntity.java
@@ -23,8 +23,6 @@ import javax.persistence.Id;
 import javax.persistence.IdClass;
 import javax.persistence.JoinColumn;
 import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
 import javax.persistence.Table;
 
 /**
@@ -33,10 +31,6 @@ import javax.persistence.Table;
 @Table(name = "clusterconfigmapping")
 @Entity
 @IdClass(ClusterConfigMappingEntityPK.class)
-@NamedQueries({
-  @NamedQuery(name = "ClusterConfigMappingEntity.findLatestClusterConfigMappingsByType",
-    query = "SELECT mapping FROM ClusterConfigMappingEntity mapping WHERE mapping.clusterId = :clusterId AND mapping.selectedInd > 0 AND mapping.typeName = :typeName")})
-
 public class ClusterConfigMappingEntity {
 
   @Id

http://git-wip-us.apache.org/repos/asf/ambari/blob/1df9b466/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
index 0df6f68..7c28835 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
@@ -51,8 +51,7 @@ import javax.persistence.TableGenerator;
     @NamedQuery(name = "ServiceConfigEntity.findNextServiceConfigVersion", query = "SELECT COALESCE(MAX(serviceConfig.version), 0) + 1 AS nextVersion FROM ServiceConfigEntity serviceConfig WHERE serviceConfig.serviceName=:serviceName AND serviceConfig.clusterId=:clusterId"),
     @NamedQuery(name = "ServiceConfigEntity.findAllServiceConfigsByStack", query = "SELECT serviceConfig FROM ServiceConfigEntity serviceConfig WHERE serviceConfig.clusterId=:clusterId AND serviceConfig.stack=:stack"),
     @NamedQuery(name = "ServiceConfigEntity.findLatestServiceConfigsByStack", query = "SELECT serviceConfig FROM ServiceConfigEntity serviceConfig WHERE serviceConfig.clusterId = :clusterId AND serviceConfig.version = (SELECT MAX(serviceConfig2.version) FROM ServiceConfigEntity serviceConfig2 WHERE serviceConfig2.clusterId=:clusterId AND serviceConfig2.stack=:stack AND serviceConfig2.serviceName = serviceConfig.serviceName)"),
-    @NamedQuery(name = "ServiceConfigEntity.findLatestServiceConfigsByService", query = "SELECT scv FROM ServiceConfigEntity scv WHERE scv.clusterId = :clusterId AND scv.serviceName = :serviceName AND scv.version = (SELECT MAX(scv2.version) FROM ServiceConfigEntity scv2 WHERE (scv2.serviceName = :serviceName AND scv2.clusterId = :clusterId) AND (scv2.groupId = scv.groupId OR (scv2.groupId IS NULL AND scv.groupId IS NULL)))"),
-    @NamedQuery(name = "ServiceConfigEntity.findLatestServiceConfigsByCluster", query = "SELECT scv FROM ServiceConfigEntity scv WHERE scv.clusterId = :clusterId AND scv.serviceConfigId IN (SELECT MAX(scv1.serviceConfigId) FROM ServiceConfigEntity scv1 WHERE (scv1.clusterId = :clusterId) AND (scv1.groupId IS NULL) GROUP BY scv1.serviceName)")})
+    @NamedQuery(name = "ServiceConfigEntity.findLatestServiceConfigsByService", query = "SELECT scv FROM ServiceConfigEntity scv WHERE scv.clusterId = :clusterId AND scv.serviceName = :serviceName AND scv.version = (SELECT MAX(scv2.version) FROM ServiceConfigEntity scv2 WHERE (scv2.serviceName = :serviceName AND scv2.clusterId = :clusterId) AND (scv2.groupId = scv.groupId OR (scv2.groupId IS NULL AND scv.groupId IS NULL)))")})
 public class ServiceConfigEntity {
   @Id
   @Column(name = "service_config_id")
@@ -264,4 +263,4 @@ public class ServiceConfigEntity {
     }
     return true;
   }
-}
+}
\ No newline at end of file


[31/50] ambari git commit: AMBARI-19022: Ambari-server: Remove credential store from KNOX stack definition since it does not support credential store

Posted by sw...@apache.org.
AMBARI-19022: Ambari-server: Remove credential store from KNOX stack definition since it does not support credential store


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8036eb24
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8036eb24
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8036eb24

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 8036eb247691f7e5278ab13bb36b60eaac4973df
Parents: 40fde4a
Author: Nahappan Somasundaram <ns...@hortonworks.com>
Authored: Tue Nov 29 14:44:39 2016 -0800
Committer: Nahappan Somasundaram <ns...@hortonworks.com>
Committed: Tue Nov 29 20:53:04 2016 -0800

----------------------------------------------------------------------
 .../src/main/resources/stacks/HDP/2.5/services/KNOX/metainfo.xml | 4 ----
 1 file changed, 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8036eb24/ambari-server/src/main/resources/stacks/HDP/2.5/services/KNOX/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/KNOX/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/KNOX/metainfo.xml
index 01af67c..8f303a5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/KNOX/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/KNOX/metainfo.xml
@@ -21,10 +21,6 @@
     <service>
       <name>KNOX</name>
       <version>0.9.0.2.5</version>
-      <credential-store>
-        <supported>true</supported>
-        <enabled>false</enabled>
-      </credential-store>
     </service>
   </services>
 </metainfo>


[44/50] ambari git commit: AMBARI-18600: Workflow Designer View: When logs are empty, you see the spinner, instead of a message (sangeetar)

Posted by sw...@apache.org.
AMBARI-18600: Workflow Designer View: When logs are empty, you see the spinner, instead of a message (sangeetar)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/47d2f707
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/47d2f707
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/47d2f707

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 47d2f707499275a35269f7335182e5c8715d7f8b
Parents: 037ed63
Author: Sangeeta Ravindran <sa...@apache.org>
Authored: Wed Nov 30 13:09:19 2016 -0800
Committer: Sangeeta Ravindran <sa...@apache.org>
Committed: Wed Nov 30 13:09:35 2016 -0800

----------------------------------------------------------------------
 .../wfmanager/src/main/resources/ui/app/components/job-details.js | 3 +++
 1 file changed, 3 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/47d2f707/contrib/views/wfmanager/src/main/resources/ui/app/components/job-details.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/job-details.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-details.js
index e403dc4..0e5fd36 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/job-details.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-details.js
@@ -349,6 +349,7 @@ export default Ember.Component.extend({
           url = url + '&type=action&scope='+ params.logActionList;
         }
         Ember.$.get(url,function(response){
+          response = response.trim().length > 0 ? response : "No messages present";
           this.set('model.jobLog', response);
         }.bind(this)).fail(function(error){
           this.set('error', error);
@@ -356,6 +357,7 @@ export default Ember.Component.extend({
       },
       getErrorLog : function (){
         Ember.$.get(Ember.ENV.API_URL+'/v2/job/'+this.get('id')+'?show=errorlog',function(response){
+          response = response.trim().length > 0 ? response : "No messages present";
           this.set('model.errorLog', response);
         }.bind(this)).fail(function(error){
           this.set('error', error);
@@ -363,6 +365,7 @@ export default Ember.Component.extend({
       },
       getAuditLog : function (){
         Ember.$.get(Ember.ENV.API_URL+'/v2/job/'+this.get('id')+'?show=auditlog',function(response){
+          response = response.trim().length > 0 ? response : "No messages present";
           this.set('model.auditLog', response);
         }.bind(this)).fail(function(error){
           this.set('error', error);


[35/50] ambari git commit: AMBARI-19030 Service Auto Start operations are permitted during Upgrade. (atkach)

Posted by sw...@apache.org.
AMBARI-19030 Service Auto Start operations are permitted during Upgrade. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1bd562a1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1bd562a1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1bd562a1

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 1bd562a1803efa12a4afcbf312c5a898f63bff41
Parents: adde3db
Author: Andrii Tkach <at...@apache.org>
Authored: Wed Nov 30 14:19:52 2016 +0200
Committer: Andrii Tkach <at...@apache.org>
Committed: Wed Nov 30 15:45:04 2016 +0200

----------------------------------------------------------------------
 .../app/templates/main/admin/service_auto_start.hbs       | 10 ++++++----
 ambari-web/app/views/main/admin.js                        |  3 ++-
 ambari-web/app/views/main/admin/service_auto_start.js     |  4 ++++
 .../main/admin/service_auto_start/component_auto_start.js |  1 +
 .../admin/stack_upgrade/upgrade_version_box_view_test.js  |  2 +-
 5 files changed, 14 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1bd562a1/ambari-web/app/templates/main/admin/service_auto_start.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/service_auto_start.hbs b/ambari-web/app/templates/main/admin/service_auto_start.hbs
index e2eb192..a14add2 100644
--- a/ambari-web/app/templates/main/admin/service_auto_start.hbs
+++ b/ambari-web/app/templates/main/admin/service_auto_start.hbs
@@ -63,10 +63,12 @@
                                 </div>
                             </div>
                         {{/each}}
-                        <div class="offset4">
-                            <a href="#" class="enable-all-link" {{action enableAll tab target="controller"}}>{{t common.enableAll}}</a> |
-                            <a href="#" {{action disableAll tab target="controller"}}>{{t common.disableAll}}</a>
-                        </div>
+                        {{#isAuthorized "SERVICE.START_STOP, CLUSTER.MODIFY_CONFIGS"}}
+                            <div class="offset4">
+                                <a href="#" class="enable-all-link" {{action enableAll tab target="controller"}}>{{t common.enableAll}}</a> |
+                                <a href="#" {{action disableAll tab target="controller"}}>{{t common.disableAll}}</a>
+                            </div>
+                        {{/isAuthorized}}
                     </div>
                 {{/each}}
             </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bd562a1/ambari-web/app/views/main/admin.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin.js b/ambari-web/app/views/main/admin.js
index 704527b..0fa84e8 100644
--- a/ambari-web/app/views/main/admin.js
+++ b/ambari-web/app/views/main/admin.js
@@ -51,7 +51,8 @@ App.MainAdminView = Em.View.extend({
         items.push({
           name: 'serviceAutoStart',
           url: 'adminServiceAutoStart',
-          label: Em.I18n.t('admin.serviceAutoStart.title')
+          label: Em.I18n.t('admin.serviceAutoStart.title'),
+          disabled: App.get('upgradeInProgress') || App.get('upgradeHolding')
         });
       }
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bd562a1/ambari-web/app/views/main/admin/service_auto_start.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/service_auto_start.js b/ambari-web/app/views/main/admin/service_auto_start.js
index 5b10d1b..51512a2 100644
--- a/ambari-web/app/views/main/admin/service_auto_start.js
+++ b/ambari-web/app/views/main/admin/service_auto_start.js
@@ -30,8 +30,11 @@ App.MainAdminServiceAutoStartView = Em.View.extend({
 
   savedRecoveryEnabled: false,
 
+  isDisabled: false,
+
   didInsertElement: function () {
     var self = this;
+    this.set('isDisabled', !App.isAuthorized('SERVICE.START_STOP, CLUSTER.MODIFY_CONFIGS'));
     this.get('controller').loadClusterConfig().done(function (data) {
       var tag = [
         {
@@ -77,6 +80,7 @@ App.MainAdminServiceAutoStartView = Em.View.extend({
         offText: Em.I18n.t('common.disabled'),
         offColor: 'default',
         onColor: 'success',
+        disabled: this.get('isDisabled'),
         handleWidth: Math.max(Em.I18n.t('common.enabled').length, Em.I18n.t('common.disabled').length) * 8,
         onSwitchChange: function (event, state) {
           self.updateClusterConfigs(state);

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bd562a1/ambari-web/app/views/main/admin/service_auto_start/component_auto_start.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/service_auto_start/component_auto_start.js b/ambari-web/app/views/main/admin/service_auto_start/component_auto_start.js
index ceafd21..9794d6f 100644
--- a/ambari-web/app/views/main/admin/service_auto_start/component_auto_start.js
+++ b/ambari-web/app/views/main/admin/service_auto_start/component_auto_start.js
@@ -56,6 +56,7 @@ App.MainAdminServiceAutoStartComponentView = Em.View.extend({
         offText: Em.I18n.t('common.disabled'),
         offColor: 'default',
         onColor: 'success',
+        disabled: this.get('parentView.isDisabled'),
         handleWidth: Math.max(Em.I18n.t('common.enabled').length, Em.I18n.t('common.disabled').length) * 8,
         onSwitchChange: function (event, state) {
           self.set('tab.enabledComponents', self.get('tab.enabledComponents') + (state ? 1 : -1));

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bd562a1/ambari-web/test/views/main/admin/stack_upgrade/upgrade_version_box_view_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/admin/stack_upgrade/upgrade_version_box_view_test.js b/ambari-web/test/views/main/admin/stack_upgrade/upgrade_version_box_view_test.js
index 4eaaac0..45ce8e9 100644
--- a/ambari-web/test/views/main/admin/stack_upgrade/upgrade_version_box_view_test.js
+++ b/ambari-web/test/views/main/admin/stack_upgrade/upgrade_version_box_view_test.js
@@ -718,7 +718,7 @@ describe('App.UpgradeVersionBoxView', function () {
           status: 'UPGRADING',
           isLink: true,
           action: 'openUpgradeDialog',
-          iconClass: 'glyphicon glyphicon-pause',
+          iconClass: 'icon-pause',
           text: Em.I18n.t('admin.stackVersions.version.upgrade.pause')
         },
         title: 'upgrading, holding, isWizardRestricted=true'
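
The three hunks above all feed one decision: the auto-start controls stay visible but become inert when the user lacks the right privileges or when an upgrade is in progress or holding. A compressed sketch of that decision in Python (hypothetical helper and flag names, not the Ember view code):

    # Hypothetical stand-ins for App.isAuthorized and the upgrade flags.
    def is_authorized(user_permissions, required):
        """True if the user holds at least one of the required permissions."""
        return bool(set(required) & set(user_permissions))

    def auto_start_controls_disabled(user_permissions, upgrade_in_progress, upgrade_holding):
        # Toggles are usable only for users who may start/stop services or modify
        # cluster configs, and never while an upgrade is running or holding.
        authorized = is_authorized(
            user_permissions, ["SERVICE.START_STOP", "CLUSTER.MODIFY_CONFIGS"])
        return (not authorized) or upgrade_in_progress or upgrade_holding

    # Example: a read-only operator during an upgrade -> controls are disabled.
    print(auto_start_controls_disabled(["CLUSTER.VIEW"], True, False))  # True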


[30/50] ambari git commit: Revert "AMBARI-18903. Implement Create Alerts: Create a base wizard for all steps.(xiwang)"

Posted by sw...@apache.org.
Revert "AMBARI-18903. Implement Create Alerts: Create a base wizard for all steps.(xiwang)"

This reverts commit 521995f4a9884ee62bd92f9f226b152f0155afc9.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/40fde4a6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/40fde4a6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/40fde4a6

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 40fde4a65a4d21477db53d402af422348f1821eb
Parents: be8354a
Author: Xi Wang <xi...@apache.org>
Authored: Tue Nov 29 17:22:39 2016 -0800
Committer: Xi Wang <xi...@apache.org>
Committed: Tue Nov 29 17:23:52 2016 -0800

----------------------------------------------------------------------
 .../main/alerts/alert_definitions_actions_controller.js          | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/40fde4a6/ambari-web/app/controllers/main/alerts/alert_definitions_actions_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/alerts/alert_definitions_actions_controller.js b/ambari-web/app/controllers/main/alerts/alert_definitions_actions_controller.js
index ad239c6..f3f0387 100644
--- a/ambari-web/app/controllers/main/alerts/alert_definitions_actions_controller.js
+++ b/ambari-web/app/controllers/main/alerts/alert_definitions_actions_controller.js
@@ -27,12 +27,12 @@ App.MainAlertDefinitionActionsController = Em.ArrayController.extend({
    * @type {{title: string, icon: string, action: string, showDivider: boolean}[]}
    */
   content: [
-    {
+    /*{
       title: Em.I18n.t('alerts.actions.create'),
       icon: 'icon-plus',
       action: 'createNewAlertDefinition',
       showDivider: true
-    },
+    },*/
     {
       title: Em.I18n.t('alerts.actions.manageGroups'),
       icon: 'icon-th-large',


[46/50] ambari git commit: AMBARI-19037. Clean up logs for the usage of taskId for easy lookup of command progress and status (smohanty)

Posted by sw...@apache.org.
AMBARI-19037. Clean up logs for the usage of taskId for easy lookup of command progress and status (smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/707f07f6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/707f07f6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/707f07f6

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 707f07f637db30b5b9cf26f165fcaaeb51ef0fce
Parents: c578a37
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Wed Nov 30 14:45:40 2016 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Wed Nov 30 14:46:12 2016 -0800

----------------------------------------------------------------------
 ambari-agent/src/main/python/ambari_agent/ActionQueue.py  | 10 +++++-----
 .../main/python/ambari_agent/CustomServiceOrchestrator.py |  4 ++--
 .../org/apache/ambari/server/agent/HeartBeatHandler.java  |  2 +-
 .../apache/ambari/server/agent/HeartbeatProcessor.java    |  2 +-
 .../server/state/services/RetryUpgradeActionService.java  |  2 +-
 5 files changed, 10 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/707f07f6/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py b/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
index f104939..3ec0621 100644
--- a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
+++ b/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
@@ -125,7 +125,7 @@ class ActionQueue(threading.Thread):
   def cancel(self, commands):
     for command in commands:
 
-      logger.info("Canceling command {tid}".format(tid = str(command['target_task_id'])))
+      logger.info("Canceling command with taskId = {tid}".format(tid = str(command['target_task_id'])))
       logger.debug(pprint.pformat(command))
 
       task_id = command['target_task_id']
@@ -321,7 +321,7 @@ class ActionQueue(threading.Thread):
         else:
           status = self.FAILED_STATUS
           if (commandresult['exitcode'] == -signal.SIGTERM) or (commandresult['exitcode'] == -signal.SIGKILL):
-            logger.info('Command {cid} was canceled!'.format(cid=taskId))
+            logger.info('Command with taskId = {cid} was canceled!'.format(cid=taskId))
             break
 
       if status != self.COMPLETED_STATUS and retryAble and retryDuration > 0:
@@ -330,17 +330,17 @@ class ActionQueue(threading.Thread):
           delay = retryDuration
         retryDuration -= delay  # allow one last attempt
         commandresult['stderr'] += "\n\nCommand failed. Retrying command execution ...\n\n"
-        logger.info("Retrying command id {cid} after a wait of {delay}".format(cid=taskId, delay=delay))
+        logger.info("Retrying command with taskId = {cid} after a wait of {delay}".format(cid=taskId, delay=delay))
         time.sleep(delay)
         continue
       else:
-        logger.info("Quit retrying for command id {cid}. Status: {status}, retryAble: {retryAble}, retryDuration (sec): {retryDuration}, last delay (sec): {delay}"
+        logger.info("Quit retrying for command with taskId = {cid}. Status: {status}, retryAble: {retryAble}, retryDuration (sec): {retryDuration}, last delay (sec): {delay}"
                     .format(cid=taskId, status=status, retryAble=retryAble, retryDuration=retryDuration, delay=delay))
         break
 
     # final result to stdout
     commandresult['stdout'] += '\n\nCommand completed successfully!\n' if status == self.COMPLETED_STATUS else '\n\nCommand failed after ' + str(numAttempts) + ' tries\n'
-    logger.info('Command {cid} completed successfully!'.format(cid=taskId) if status == self.COMPLETED_STATUS else 'Command {cid} failed after {attempts} tries'.format(cid=taskId, attempts=numAttempts))
+    logger.info('Command with taskId = {cid} completed successfully!'.format(cid=taskId) if status == self.COMPLETED_STATUS else 'Command with taskId = {cid} failed after {attempts} tries'.format(cid=taskId, attempts=numAttempts))
 
     roleResult = self.commandStatuses.generate_report_template(command)
     roleResult.update({
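
The ActionQueue hunks only reword log lines, but the point is that the taskId now appears in every message the retry loop emits, so a single grep follows a command from first attempt to final status. A self-contained sketch of that logging pattern (a toy run_once callable, not the agent's real command runner):

    import logging, time

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("ActionQueue.sketch")

    def run_with_retries(task_id, run_once, retry_duration=30, delay=5):
        """Retry a command until it succeeds or the retry budget runs out,
        mentioning the taskId in every log line for easy lookup."""
        attempts = 0
        while True:
            attempts += 1
            if run_once():
                logger.info("Command with taskId = %s completed successfully!", task_id)
                return True
            if retry_duration <= 0:
                logger.info("Quit retrying for command with taskId = %s after %d tries",
                            task_id, attempts)
                return False
            wait = min(delay, retry_duration)
            retry_duration -= wait
            logger.info("Retrying command with taskId = %s after a wait of %d", task_id, wait)
            time.sleep(wait)

    # Example: fail twice, then succeed.
    outcomes = iter([False, False, True])
    run_with_retries(42, lambda: next(outcomes), retry_duration=10, delay=1)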

http://git-wip-us.apache.org/repos/asf/ambari/blob/707f07f6/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
index ebd3506..7d61611 100644
--- a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
+++ b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
@@ -97,12 +97,12 @@ class CustomServiceOrchestrator():
       if task_id in self.commands_in_progress.keys():
         pid = self.commands_in_progress.get(task_id)
         self.commands_in_progress[task_id] = reason
-        logger.info("Canceling command with task_id - {tid}, " \
+        logger.info("Canceling command with taskId = {tid}, " \
                     "reason - {reason} . Killing process {pid}"
                     .format(tid=str(task_id), reason=reason, pid=pid))
         shell.kill_process_with_children(pid)
       else: 
-        logger.warn("Unable to find pid by taskId = %s" % task_id)
+        logger.warn("Unable to find process associated with taskId = %s" % task_id)
 
   def get_py_executor(self, forced_command_name):
     """

http://git-wip-us.apache.org/repos/asf/ambari/blob/707f07f6/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
index a25b875..75bef30 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
@@ -329,7 +329,7 @@ public class HeartBeatHandler {
           case BACKGROUND_EXECUTION_COMMAND:
           case EXECUTION_COMMAND: {
             ExecutionCommand ec = (ExecutionCommand)ac;
-            LOG.info("HeartBeatHandler.sendCommands: sending ExecutionCommand for host {}, role {}, roleCommand {}, and command ID {}, task ID {}",
+            LOG.info("HeartBeatHandler.sendCommands: sending ExecutionCommand for host {}, role {}, roleCommand {}, and command ID {}, taskId {}",
                      ec.getHostname(), ec.getRole(), ec.getRoleCommand(), ec.getCommandId(), ec.getTaskId());
             Map<String, String> hlp = ec.getHostLevelParams();
             if (hlp != null) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/707f07f6/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatProcessor.java
index 2448c99..8f4782e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatProcessor.java
@@ -552,7 +552,7 @@ public class HeartbeatProcessor extends AbstractService{
             }
 
             LOG.error("Operation failed - may be retried. Service component host: "
-                + schName + ", host: " + hostname + " Action id " + report.getActionId() + " and Task id " + report.getTaskId());
+                + schName + ", host: " + hostname + " Action id " + report.getActionId() + " and taskId " + report.getTaskId());
             if (actionManager.isInProgressCommand(report)) {
               scHost.handleEvent(new ServiceComponentHostOpFailedEvent
                   (schName, hostname, now));

http://git-wip-us.apache.org/repos/asf/ambari/blob/707f07f6/ambari-server/src/main/java/org/apache/ambari/server/state/services/RetryUpgradeActionService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/services/RetryUpgradeActionService.java b/ambari-server/src/main/java/org/apache/ambari/server/state/services/RetryUpgradeActionService.java
index 72b8ab7..1ea5558 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/services/RetryUpgradeActionService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/services/RetryUpgradeActionService.java
@@ -199,7 +199,7 @@ public class RetryUpgradeActionService extends AbstractScheduledService {
     List<HostRoleCommandEntity> holdingCommands = m_hostRoleCommandDAO.findByRequestIdAndStatuses(requestId, HOLDING_STATUSES);
     if (holdingCommands.size() > 0) {
       for (HostRoleCommandEntity hrc : holdingCommands) {
-        LOG.debug("Comparing task id: {}, original start time: {}, now: {}",
+        LOG.debug("Comparing taskId: {}, original start time: {}, now: {}",
             hrc.getTaskId(), hrc.getOriginalStartTime(), now);
 
         /*


[26/50] ambari git commit: AMBARI-18792. Update some configuration properties for hive interactive for the HDP 2.6 stack (Siddharth Seth via smohanty)

Posted by sw...@apache.org.
AMBARI-18792. Update some configuration properties for hive interactive for the HDP 2.6 stack (Siddharth Seth via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f8bfa056
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f8bfa056
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f8bfa056

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: f8bfa056574d1f8dc695615eea70fae74b156fc2
Parents: aba9640
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Tue Nov 29 11:39:53 2016 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Tue Nov 29 11:39:53 2016 -0800

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog250.java       | 53 ++++++++++++-
 .../configuration/hive-interactive-site.xml     | 58 +++++++++++++++
 .../HIVE/configuration/tez-interactive-site.xml | 78 ++++++++++++++++++++
 3 files changed, 188 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f8bfa056/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
index bc3c120..44ce895 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
@@ -23,6 +23,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.CommandExecutionType;
@@ -123,7 +124,7 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
    */
   @Override
   protected void executePreDMLUpdates() throws AmbariException, SQLException {
-
+    updateHiveLlapConfigs();
   }
 
   /**
@@ -201,6 +202,56 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
     return content;
   }
 
+  protected void updateHiveLlapConfigs() throws AmbariException {
+    AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
+    Clusters clusters = ambariManagementController.getClusters();
+
+    if (clusters != null) {
+      Map<String, Cluster> clusterMap = clusters.getClusters();
+
+      if (clusterMap != null && !clusterMap.isEmpty()) {
+        for (final Cluster cluster : clusterMap.values()) {
+          Set<String> installedServices = cluster.getServices().keySet();
+
+          if (installedServices.contains("HIVE")) {
+            Config hiveSite = cluster.getDesiredConfigByType("hive-interactive-site");
+            if (hiveSite != null) {
+              Map<String, String> hiveSiteProperties = hiveSite.getProperties();
+              String schedulerDelay = hiveSiteProperties.get("hive.llap.task.scheduler.locality.delay");
+              if (schedulerDelay != null) {
+                // Property exists. Change to new default if set to -1.
+                if (schedulerDelay.length() != 0) {
+                  try {
+                    int schedulerDelayInt = Integer.parseInt(schedulerDelay);
+                    if (schedulerDelayInt == -1) {
+                      // Old default. Set to new default.
+                      updateConfigurationProperties("hive-interactive-site", Collections
+                              .singletonMap("hive.llap.task.scheduler.locality.delay", "8000"), true,
+                          false);
+                    }
+                  } catch (NumberFormatException e) {
+                    // Invalid existing value. Set to new default.
+                    updateConfigurationProperties("hive-interactive-site", Collections
+                            .singletonMap("hive.llap.task.scheduler.locality.delay", "8000"), true,
+                        false);
+                  }
+                }
+              }
+              updateConfigurationProperties("hive-interactive-site",
+                  Collections.singletonMap("hive.mapjoin.hybridgrace.hashtable", "true"), true,
+                  false);
+              updateConfigurationProperties("tez-interactive-site",
+                  Collections.singletonMap("tez.session.am.dag.submit.timeout.secs", "1209600"), true,
+                  false);
+              // Explicitly skipping hive.llap.allow.permanent.fns during upgrades, since it's related to security,
+              // and we don't know if the value is set by the user or as a result of the previous default.
+            }
+          }
+        }
+      }
+    }
+  }
+
   /**
    * Creates the servicecomponent_version table
    *

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8bfa056/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-interactive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-interactive-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-interactive-site.xml
new file mode 100644
index 0000000..f33ade7
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/hive-interactive-site.xml
@@ -0,0 +1,58 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<configuration supports_final="true">
+  <!--
+  Note: This file includes only those configs which are supposed to have different value from the parent hive/hive-site.
+       It inherits the other required configs from hive/hive-site.
+       The inheritance logic in order to get hive2/hive-site goes like this :
+
+       1. We read the hive/hive-site which includes the stack defaults and Stack Advisor recommended values.
+       2. We take the read hive/hive-site (step 1), and on top of it apply the hive-interactive-site to get
+          hive2/hive-site.
+
+       Therefore, any config defined here will override the config value read from hive2/hive-site (step 1).
+  -->
+  <property>
+    <name>hive.llap.task.scheduler.locality.delay</name>
+    <value>8000</value>
+    <description>
+      Amount of time to wait (in ms) before allocating a request which contains location information,
+      to a location other than the ones requested. Set to -1 for an infinite delay, 0
+      for no delay.
+    </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+
+  <property>
+    <name>hive.mapjoin.hybridgrace.hashtable</name>
+    <value>true</value>
+    <description>Whether to use hybrid grace hash join as the join method for mapjoin. Tez only.</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+
+  <property>
+    <name>hive.llap.enable.grace.join.in.llap</name>
+    <value>false</value>
+    <description>Override if grace join should be allowed to run in llap for regular map joins.
+      Dynamic partitioned joins will honor the hive.mapjoin.hybridgrace.hashtable property in LLAP
+    </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+
+</configuration>
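
The comment block in the new file describes a simple layering rule: read the parent hive/hive-site first, then apply hive-interactive-site on top, so anything defined here wins. A minimal sketch of that merge order with plain dictionaries (illustration only, not Ambari's config machinery):

    def effective_config(parent_site, interactive_overrides):
        """Later layers win: start from the parent hive-site, then apply the
        hive-interactive-site values on top of it."""
        merged = dict(parent_site)
        merged.update(interactive_overrides)
        return merged

    hive_site = {
        "hive.llap.task.scheduler.locality.delay": "-1",
        "hive.execution.engine": "tez",
    }
    hive_interactive_site = {
        "hive.llap.task.scheduler.locality.delay": "8000",
        "hive.mapjoin.hybridgrace.hashtable": "true",
    }

    print(effective_config(hive_site, hive_interactive_site))
    # The override wins: locality.delay becomes 8000; the engine is inherited unchanged.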

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8bfa056/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/tez-interactive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/tez-interactive-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/tez-interactive-site.xml
new file mode 100644
index 0000000..f7de121
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/tez-interactive-site.xml
@@ -0,0 +1,78 @@
+<?xml version="1.0"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<configuration supports_final="true">
+  <!--
+  Note: This file includes only those configs which are supposed to have different value from the parent tez/tez-site.
+        It inherits the other required configs from tez/tez-site.
+        The inheritance logic in order to get tez_hive2/tez-site goes like this :
+
+        1. We read the tez/tez-site which includes the stack defaults and Stack Advisor recommended values.
+        2. We take the read tez/tez-site (step 1), and on top of it apply the tez-interactive-site to get tez_hive2/tez-site.
+
+        Therefore, any config defined here will override the config value read from tez/tez-site (step 1).
+  -->
+  <property>
+    <name>tez.task.heartbeat.timeout.check-ms</name>
+    <value>11000</value>
+    <description>The time interval, in milliseconds, at which the AM will check
+      for timed out tasks
+    </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>tez.task.timeout-ms</name>
+    <value>20000</value>
+    <description>Amount of time the Tez AM waits before marking a task which
+      has not sent in a heartbeat, as timed out
+    </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>tez.am.client.heartbeat.timeout.secs</name>
+    <value>60</value>
+    <description>The time interval, after which an AM will kill itself, if it
+      does not receive a heartbeat from the client.
+    </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>tez.am.client.heartbeat.poll.interval.millis</name>
+    <value>6000</value>
+    <description>The interval at which the AM checks for a client heartbeat
+      timeout
+    </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>tez.am.node-blacklisting.enabled</name>
+    <value>false</value>
+    <description>Whether to enable blacklisting in Tez AMs. Disable for LLAP
+    </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>tez.session.am.dag.submit.timeout.secs</name>
+    <value>1209600</value>
+    <description>The amount of time an AM will wait, before killing itself,
+      if no DAG is submitted.
+    </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+
+</configuration>


[27/50] ambari git commit: AMBARI-19003. Perf: Fix deploy-gce-perf-cluster.py to deploy separate server onto own cluster with different settings for more cores and MySQL DB (alejandro)

Posted by sw...@apache.org.
AMBARI-19003. Perf: Fix deploy-gce-perf-cluster.py to deploy separate server onto own cluster with different settings for more cores and MySQL DB (alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/052da577
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/052da577
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/052da577

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 052da577bcf5c3130337b7e783bda2b9c0207127
Parents: f8bfa05
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Tue Nov 29 12:43:58 2016 -0800
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Tue Nov 29 12:43:58 2016 -0800

----------------------------------------------------------------------
 contrib/utils/perf/deploy-gce-perf-cluster.py | 243 ++++++++++++++-------
 1 file changed, 169 insertions(+), 74 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/052da577/contrib/utils/perf/deploy-gce-perf-cluster.py
----------------------------------------------------------------------
diff --git a/contrib/utils/perf/deploy-gce-perf-cluster.py b/contrib/utils/perf/deploy-gce-perf-cluster.py
index 95ed98f..4737c6f 100644
--- a/contrib/utils/perf/deploy-gce-perf-cluster.py
+++ b/contrib/utils/perf/deploy-gce-perf-cluster.py
@@ -148,6 +148,7 @@ class SCP:
 
     return {"exitstatus": scpstat.returncode, "log": log, "errormsg": errorMsg}
 
+
 # main method to parse arguments from user and start work
 def main():
   parser = argparse.ArgumentParser(
@@ -178,6 +179,30 @@ def main():
   args = parser.parse_args()
   do_work(args)
 
+
+def do_work(args):
+  """
+  Check that all required args are passed in. If so, deploy the cluster.
+  :param args: Command line args
+  """
+  if not args.controller:
+    raise Exception("GCE controller ip address is not defined!")
+
+  if not args.key:
+    raise Exception("Path to gce ssh key is not defined!")
+
+  if not args.cluster_suffix:
+    raise Exception("Cluster name suffix is not defined!")
+
+  if not args.agent_prefix:
+    raise Exception("Agent name prefix is not defined!")
+
+  if not args.agents_count:
+    raise Exception("Agents count for whole cluster is not defined (will put 50 Agents per VM)!")
+
+  deploy_cluster(args)
+
+
 def deploy_cluster(args):
   """
   Process cluster deployment
@@ -186,49 +211,54 @@ def deploy_cluster(args):
   # When dividing, need to get the ceil.
   number_of_nodes = ((args.agents_count - 1) / NUMBER_OF_AGENTS_ON_HOST) + 1
 
-  # trying to create cluster with needed params
-  print "Creating cluster {0}-{1} with {2} large nodes on centos6...".format(cluster_prefix, args.cluster_suffix, str(number_of_nodes))
-  execute_command(args, args.controller, "/usr/sbin/gce up {0}-{1} {2} --centos6 --large".format(cluster_prefix, args.cluster_suffix, str(number_of_nodes)),
-                  "Failed to create cluster, probably not enough resources!", "-tt")
-
-  # VMs are not accessible immediately
-  time.sleep(10)
+  # In case of an error after creating VMs, can simply comment out this function to run again without creating VMs.
+  create_vms(args, number_of_nodes)
 
   # getting list of vms information like hostname and ip address
   print "Getting list of virtual machines from cluster..."
   # Dictionary from host name to IP
-  vms = get_vms_list(args)
+  (server_dict, agents_dict) = get_vms_list(args)
 
   # check number of nodes in cluster to be the same as user asked
   print "Checking count of created nodes in cluster..."
-  if not vms or len(vms) < number_of_nodes:
-    raise Exception("Cannot bring up enough nodes. Requested {0}, but got {1}. Probably not enough resources!".format(number_of_nodes, len(vms)))
+  if not agents_dict or len(agents_dict) < number_of_nodes:
+    raise Exception("Cannot bring up enough nodes. Requested {0}, but got {1}. Probably not enough resources!".format(number_of_nodes, len(agents_dict)))
 
-  print "GCE cluster was successfully created!"
-  pretty_print_vms(vms)
+  print "GCE cluster was successfully created!\n"
 
   # installing/starting ambari-server and ambari-agents on each host
-  server_host_name = sorted(vms.items())[0][0]
-  server_installed = False
+  server_item = server_dict.items()[0]
+  server_host_name = server_item[0]
+  server_ip = server_item[1]
+  print "=========================="
+  print "Server Hostname: %s" % server_host_name
+  print "Server IP: %s" % server_ip
+  print "==========================\n"
+
+  # Sort the agents by hostname into a list.
+  sorted_agents = sort_hosts(agents_dict)
+  pretty_print_vms(sorted_agents)
 
-  print "Creating server.sh script (which will be executed on server to install/configure/start ambari-server and ambari-agent)..."
-  create_server_script(args, server_host_name)
+  print "Creating server.sh script (which will be executed on server to install/configure/start ambari-server)..."
+  create_server_script(server_host_name)
 
   print "Creating agent.sh script (which will be executed on agent hosts to install/configure/start ambari-agent..."
-  create_agent_script(args, server_host_name)
+  create_agent_script(server_host_name)
 
   time.sleep(10)
 
+  prepare_server(args, server_host_name, server_ip)
+
   # If the user asks for a number of agents that is not a multiple of 50, then only create how many are needed instead
   # of 50 on every VM.
   num_agents_left_to_create = args.agents_count
-
   start_num = 1
-  for (hostname, ip) in sorted(vms.items()):
+
+  for (hostname, ip) in sorted_agents:
     num_agents_on_this_host = min(num_agents_left_to_create, NUMBER_OF_AGENTS_ON_HOST)
 
     print "=========================="
-    print "Working on VM {0} that will contain hosts %d - %d".format(hostname, start_num, start_num + num_agents_on_this_host - 1)
+    print "Working on VM {0} that will contain hosts {1} - {2}".format(hostname, start_num, start_num + num_agents_on_this_host - 1)
 
     # The agent multiplier config will be different on each VM.
 
@@ -236,90 +266,117 @@ def deploy_cluster(args):
     start_num += num_agents_on_this_host
     num_agents_left_to_create -= num_agents_on_this_host
 
-    if not server_installed:
-      remote_path = "/server.sh"
-      local_path = "server.sh"
-      print "Copying server.sh to {0}...".format(hostname)
-      put_file(args, ip, local_path, remote_path, "Failed to copy file!")
-
-      print "Generating agent-multiplier.conf"
-      execute_command(args, ip, cmd_generate_multiplier_conf, "Failed to generate agent-multiplier.conf on host {0}".format(hostname))
-
-      print "Executing remote ssh command (set correct permissions and start executing server.sh in separate process) on {0}...".format(hostname)
-      execute_command(args, ip, "cd /; chmod 777 server.sh; nohup ./server.sh >/server.log 2>&1 &",
-                    "Install/configure/start server script failed!")
-      server_installed = True
-    else:
-      remote_path = "/agent.sh"
-      local_path = "agent.sh"
-      print "Copying agent.sh to {0}...".format(hostname)
-      put_file(args, ip, local_path, remote_path, "Failed to copy file!")
-
-      print "Generating agent-multiplier.conf"
-      execute_command(args, ip, cmd_generate_multiplier_conf, "Failed to generate agent-multiplier.conf on host {0}".format(hostname))
-
-      print "Executing remote ssh command (set correct permissions and start executing agent.sh in separate process) on {0}...".format(hostname)
-      execute_command(args, ip, "cd /; chmod 777 agent.sh; nohup ./agent.sh >/agent.log 2>&1 &",
-                    "Install/configure start agent script failed!")
+    prepare_agent(args, hostname, ip, cmd_generate_multiplier_conf)
 
+  pass
   print "All scripts where successfully copied and started on all hosts. " \
         "\nPay attention that server.sh script need 5 minutes to finish and agent.sh need 3 minutes!"
 
 
-def do_work(args):
+def create_vms(args, number_of_nodes):
   """
-  Check that all required args are passed in. If so, deploy the cluster.
+  Request the server and VMs for the agents from GCE.
   :param args: Command line args
+  :param number_of_nodes: Number of VMs to request.
   """
-  if not args.controller:
-    raise Exception("GCE controller ip address is not defined!")
+  print "Creating server VM {0}-server-{1} with xxlarge nodes on centos6...".format(cluster_prefix, args.cluster_suffix)
+  execute_command(args, args.controller, "/usr/sbin/gce up {0}-server-{1} 1 --centos6 --xxlarge".format(cluster_prefix, args.cluster_suffix),
+                  "Failed to create server, probably not enough resources!", "-tt")
+  time.sleep(10)
 
-  if not args.key:
-    raise Exception("Path to gce ssh key is not defined!")
+  # trying to create cluster with needed params
+  print "Creating agent VMs {0}-agent-{1} with {2} large nodes on centos6...".format(cluster_prefix, args.cluster_suffix, str(number_of_nodes))
+  execute_command(args, args.controller, "/usr/sbin/gce up {0}-agent-{1} {2} --centos6 --large".format(cluster_prefix, args.cluster_suffix, str(number_of_nodes)),
+                  "Failed to create cluster VMs, probably not enough resources!", "-tt")
 
-  if not args.cluster_suffix:
-    raise Exception("Cluster name suffix is not defined!")
+  # VMs are not accessible immediately
+  time.sleep(10)
 
-  if not args.agent_prefix:
-    raise Exception("Agent name prefix is not defined!")
 
-  if not args.agents_count:
-    raise Exception("Agents count for whole cluster is not defined (will put 50 Agents per VM)!")
+def prepare_server(args, hostname, ip):
+  remote_path = "/server.sh"
+  local_path = "server.sh"
+  print "Copying server.sh to {0}...".format(hostname)
+  put_file(args, ip, local_path, remote_path, "Failed to copy file!")
 
-  deploy_cluster(args)
+  print "Executing remote ssh command (set correct permissions and start executing server.sh in separate process) on {0}...".format(hostname)
+  execute_command(args, ip, "cd /; chmod 777 server.sh; nohup ./server.sh >/server.log 2>&1 &",
+                  "Install/configure/start server script failed!")
 
 
-def create_server_script(args, server_host_name):
+def prepare_agent(args, hostname, ip, cmd_generate_multiplier_conf):
+  remote_path = "/agent.sh"
+  local_path = "agent.sh"
+  print "Copying agent.sh to {0}...".format(hostname)
+  put_file(args, ip, local_path, remote_path, "Failed to copy file!")
+
+  print "Generating agent-multiplier.conf"
+  execute_command(args, ip, cmd_generate_multiplier_conf, "Failed to generate agent-multiplier.conf on host {0}".format(hostname))
+
+  print "Executing remote ssh command (set correct permissions and start executing agent.sh in separate process) on {0}...".format(hostname)
+  execute_command(args, ip, "cd /; chmod 777 agent.sh; nohup ./agent.sh >/agent.log 2>&1 &",
+                  "Install/configure start agent script failed!")
+
+
+def create_server_script(server_host_name):
   """
   Creating server.sh script in the same dir where current script is located
   server.sh script will install, configure and start ambari-server and ambari-agent on host
-  :param args: Command line args
   :param server_host_name: Server host name
   """
 
+  # ambari-server setup <options> may not work properly, so doing several calls like
+  # echo "arg=value" >> .../ambari.properties
+
   contents = "#!/bin/bash\n" + \
   "wget -O /etc/yum.repos.d/ambari.repo {0}\n".format(ambari_repo_file_url) + \
-  "yum clean all; yum install git ambari-server ambari-agent -y\n" + \
-  "cd /home; git clone https://github.com/apache/ambari.git\n" + \
+  "yum clean all; yum install git ambari-server -y\n" + \
+  "mkdir /home ; cd /home ; git clone https://github.com/apache/ambari.git\n" + \
   "cp -r /home/ambari/ambari-server/src/main/resources/stacks/PERF /var/lib/ambari-server/resources/stacks/PERF\n" + \
   "cp -r /home/ambari/ambari-server/src/main/resources/stacks/PERF /var/lib/ambari-agent/cache/stacks/PERF\n" + \
+  "\n" + \
+  "\n" + \
+  "yum install mysql-connector-java* -y\n" + \
+  "yum install mysql-server -y\n" + \
+  "service mysqld start\n" + \
+  "mysql -uroot -e \"CREATE DATABASE ambari;\"\n" + \
+  "mysql -uroot -e \"SOURCE /var/lib/ambari-server/resources/Ambari-DDL-MySQL-CREATE.sql;\" ambari\n" + \
+  "mysql -uroot -e \"CREATE USER 'ambari'@'%' IDENTIFIED BY 'bigdata';\"\n" + \
+  "mysql -uroot -e \"GRANT ALL PRIVILEGES ON *.* TO 'ambari'@'%%';\"\n" + \
+  "mysql -uroot -e \"CREATE USER 'ambari'@'localhost' IDENTIFIED BY 'bigdata';\"\n" + \
+  "mysql -uroot -e \"GRANT ALL PRIVILEGES ON *.* TO 'ambari'@'localhost';\"\n" + \
+  "mysql -uroot -e \"CREATE USER 'ambari'@'{0}' IDENTIFIED BY 'bigdata';\"\n".format(server_host_name) + \
+  "mysql -uroot -e \"GRANT ALL PRIVILEGES ON *.* TO 'ambari'@'{0}';\"\n".format(server_host_name) + \
+  "mysql -uroot -e \"FLUSH PRIVILEGES;\"\n" + \
+  "\n" + \
+  "\n" + \
   "ambari-server setup -s\n" + \
+  "ambari-server setup --database mysql --jdbc-db=mysql --jdbc-driver=/usr/share/java/mysql-connector-java.jar --databasehost=localhost --databaseport=3306 --databasename=ambari --databaseusername=ambari --databasepassword=bigdata\n" + \
+  "sed -i -e 's/=postgres/=mysql/g' /etc/ambari-server/conf/ambari.properties\n" + \
+  "sed -i -e 's/server.persistence.type=local/server.persistence.type=remote/g' /etc/ambari-server/conf/ambari.properties\n" + \
+  "sed -i -e 's/local.database.user=postgres//g' /etc/ambari-server/conf/ambari.properties\n" + \
+  "sed -i -e 's/server.jdbc.postgres.schema=ambari//g' /etc/ambari-server/conf/ambari.properties\n" + \
   "sed -i -e 's/false/true/g' /var/lib/ambari-server/resources/stacks/PERF/1.0/metainfo.xml\n" + \
+  "\n" + \
+  "echo 'server.jdbc.driver=com.mysql.jdbc.Driver' >> /etc/ambari-server/conf/ambari.properties\n" + \
+  "echo 'server.jdbc.rca.url=jdbc:mysql://{0}:3306/ambari' >> /etc/ambari-server/conf/ambari.properties\n".format(server_host_name) + \
+  "echo 'server.jdbc.rca.driver=com.mysql.jdbc.Driver' >> /etc/ambari-server/conf/ambari.properties\n" + \
+  "echo 'server.jdbc.url=jdbc:mysql://{0}:3306/ambari' >> /etc/ambari-server/conf/ambari.properties\n".format(server_host_name) + \
+  "echo 'server.jdbc.port=3306' >> /etc/ambari-server/conf/ambari.properties\n" + \
+  "echo 'server.jdbc.hostname=localhost' >> /etc/ambari-server/conf/ambari.properties\n" + \
+  "echo 'server.jdbc.driver.path=/usr/share/java/mysql-connector-java.jar' >> /etc/ambari-server/conf/ambari.properties\n" + \
+  "\n" + \
   "ambari-server start --skip-database-check\n" + \
-  "sed -i -e 's/hostname=localhost/hostname={0}/g' /etc/ambari-agent/conf/ambari-agent.ini\n".format(server_host_name) + \
-  "sed -i -e 's/agent]/agent]\\nhostname_script={0}\\npublic_hostname_script={1}\\n/1' /etc/ambari-agent/conf/ambari-agent.ini\n".format(hostname_script, public_hostname_script) + \
-  "python /home/ambari/ambari-agent/conf/unix/agent-multiplier.py start\n" + \
   "exit 0"
 
   with open("server.sh", "w") as f:
     f.write(contents)
 
 
-def create_agent_script(args, server_host_name):
+def create_agent_script(server_host_name):
   """
   Creating agent.sh script in the same dir where current script is located
   agent.sh script will install, configure and start ambari-agent on host
-  :param args: Command line args
   :param server_host_name: Server host name
   """
 
@@ -327,7 +384,7 @@ def create_agent_script(args, server_host_name):
   contents = "#!/bin/bash\n" + \
   "wget -O /etc/yum.repos.d/ambari.repo {0}\n".format(ambari_repo_file_url) + \
   "yum clean all; yum install git ambari-agent -y\n" + \
-  "cd /home; git clone https://github.com/apache/ambari.git\n" + \
+  "mkdir /home ; cd /home; git clone https://github.com/apache/ambari.git\n" + \
   "cp -r /home/ambari/ambari-server/src/main/resources/stacks/PERF /var/lib/ambari-agent/cache/stacks/PERF\n" + \
   "sed -i -e 's/hostname=localhost/hostname={0}/g' /etc/ambari-agent/conf/ambari-agent.ini\n".format(server_host_name) + \
   "sed -i -e 's/agent]/agent]\\nhostname_script={0}\\npublic_hostname_script={1}\\n/1' /etc/ambari-agent/conf/ambari-agent.ini\n".format(hostname_script, public_hostname_script) + \
@@ -381,12 +438,27 @@ def put_file(args, ip, local_file, remote_file, fail_message, login='root'):
 
 def get_vms_list(args):
   """
+  Get tuple of (x, y) where 
+  x = dictionary from single server host name to ip
+  y = dictionary from multiple agent host names to ip
+  :param args: Command line arguments
+  :return: Tuple of dictionaries of hostnames and ip for server and agents.
+  """
+  # Get the server.
+  server = __get_vms_list_from_name(args, "{0}-server-{1}".format(cluster_prefix, args.cluster_suffix))
+
+  # Get the agents
+  agents = __get_vms_list_from_name(args, "{0}-agent-{1}".format(cluster_prefix, args.cluster_suffix))
+
+  return (server, agents)
+
+def __get_vms_list_from_name(args, cluster_name):
+  """
   Method to parse "gce fqdn {cluster-name}" command output and get hosts and ips pairs for every host in cluster
   :param args: Command line args
   :return: Mapping of VM host name to ip.
   """
-
-  gce_fqdb_cmd = '/usr/sbin/gce fqdn {0}-{1}'.format(cluster_prefix, args.cluster_suffix)
+  gce_fqdb_cmd = '/usr/sbin/gce fqdn {0}'.format(cluster_name)
   out = execute_command(args, args.controller, gce_fqdb_cmd, "Failed to get VMs list!", "-tt")
   lines = out.split('\n')
   #print "LINES=" + str(lines)
@@ -405,13 +477,36 @@ def get_vms_list(args):
     raise Exception('Cannot parse "{0}"'.format(lines))
 
 
+def sort_hosts(hosts):
+  """
+  Sort the hosts by name and take into account the numbers.
+  :param hosts: Dictionary from host name (e.g., perf-9-test, perf-62-test), to the IP
+  :return: Sorted list of tuples
+  """
+  host_names = hosts.keys()
+  sorted_host_tuples = [(None, None),] * len(hosts)
+
+  pattern = re.compile(".*?-agent-.*?(\d+)")
+  for host_name in host_names:
+    m = pattern.match(host_name)
+    if m and len(m.groups()) == 1:
+      number = int(m.group(1))
+      ip = hosts[host_name]
+      sorted_host_tuples[number - 1] = (host_name, ip)
+
+  return sorted_host_tuples
+
+
 def pretty_print_vms(vms):
-  print "----------------------------"
-  print "Server IP: {0}".format(sorted(vms.items())[0][1])
+  """
+  Pretty print the VMs hostnames
+  :param vms: List of tuples (hostname, ip)
+  """
+  print "=========================="
   print "Hostnames of nodes in cluster:"
-  for (hostname, ip) in sorted(vms.items()):
+  for (hostname, ip) in vms:
     print hostname
-  print "----------------------------"
+  print "==========================\n"
 
 
 if __name__ == "__main__":


[05/50] ambari git commit: AMBARI-18982. Failures due to timeout don't report any reason (aonishuk)

Posted by sw...@apache.org.
AMBARI-18982. Failures due to timeout don't report any reason (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5bdcc418
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5bdcc418
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5bdcc418

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 5bdcc418f404220f04c604413ebbf8ddd48470df
Parents: 4e4a1f9
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Mon Nov 28 12:37:44 2016 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Mon Nov 28 12:37:44 2016 +0200

----------------------------------------------------------------------
 .../main/python/ambari_agent/CustomServiceOrchestrator.py   | 9 +++++----
 .../python/ambari_agent/TestCustomServiceOrchestrator.py    | 4 ++--
 .../apache/ambari/server/actionmanager/ActionScheduler.java | 2 +-
 3 files changed, 8 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/5bdcc418/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
index 770484d..ebd3506 100644
--- a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
+++ b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
@@ -223,7 +223,7 @@ class CustomServiceOrchestrator():
       # if canceled and not background command
       if handle is None:
         cancel_reason = self.command_canceled_reason(task_id)
-        if cancel_reason:
+        if cancel_reason is not None:
           ret['stdout'] += cancel_reason
           ret['stderr'] += cancel_reason
 
@@ -251,10 +251,11 @@ class CustomServiceOrchestrator():
         logger.debug('Pop with taskId %s' % task_id)
         pid = self.commands_in_progress.pop(task_id)
         if not isinstance(pid, int):
-          if pid:
-            return '\nCommand aborted. ' + pid
+          reason = pid
+          if reason:
+            return "\nCommand aborted. Reason: '{0}'".format(reason)
           else:
-            return ''
+            return "\nCommand aborted."
     return None
 
   def requestComponentStatus(self, command):
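
In the hunk above the pid stored for a running command is replaced by the cancellation reason, and the reason, when present, is quoted in the abort message appended to stdout/stderr. A compressed sketch of that bookkeeping, with a plain dict standing in for the orchestrator's state:

    commands_in_progress = {}   # taskId -> pid while running, or a reason string once canceled

    def cancel(task_id, reason):
        """Record why a command was canceled; the pid is replaced by the reason."""
        commands_in_progress[task_id] = reason

    def command_canceled_reason(task_id):
        """Return the abort message for a finished command, or None if it was not canceled."""
        value = commands_in_progress.pop(task_id, None)
        if isinstance(value, int):      # still a pid -> the command was not canceled
            return None
        if value:
            return "\nCommand aborted. Reason: '{0}'".format(value)
        return "\nCommand aborted." if value is not None else None

    commands_in_progress[7] = 12345     # command 7 is running under pid 12345
    cancel(7, "Stage timeout")
    print(command_canceled_reason(7))   # Command aborted. Reason: 'Stage timeout'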

http://git-wip-us.apache.org/repos/asf/ambari/blob/5bdcc418/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py b/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
index 2be6e1a..0304adc 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
@@ -400,8 +400,8 @@ class TestCustomServiceOrchestrator(TestCase):
     ret = async_result.get()
 
     self.assertEqual(ret['exitcode'], 1)
-    self.assertEquals(ret['stdout'], 'killed\nCommand aborted. reason')
-    self.assertEquals(ret['stderr'], 'killed\nCommand aborted. reason')
+    self.assertEquals(ret['stdout'], 'killed\nCommand aborted. Reason: \'reason\'')
+    self.assertEquals(ret['stderr'], 'killed\nCommand aborted. Reason: \'reason\'')
 
     self.assertTrue(kill_process_with_children_mock.called)
     self.assertFalse(command['taskId'] in orchestrator.commands_in_progress.keys())

http://git-wip-us.apache.org/repos/asf/ambari/blob/5bdcc418/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
index 7a84536..e80b020 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
@@ -1246,7 +1246,7 @@ class ActionScheduler implements Runnable {
               hostRoleCommand.getStatus() == HostRoleStatus.IN_PROGRESS) {
           CancelCommand cancelCommand = new CancelCommand();
           cancelCommand.setTargetTaskId(hostRoleCommand.getTaskId());
-          cancelCommand.setReason("");
+          cancelCommand.setReason("Stage timeout");
           actionQueue.enqueue(hostRoleCommand.getHostName(), cancelCommand);
         }
       }


[03/50] ambari git commit: AMBARI-18988. [Intermittent]Ambari-server check-database command took longer (~3) minutes to complete.(vbrodetskyi)

Posted by sw...@apache.org.
AMBARI-18988. [Intermittent]Ambari-server check-database command took longer (~3) minutes to complete.(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8278384a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8278384a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8278384a

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 8278384aa6597541abb70dde9b74de17aabb472e
Parents: 7afe172
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Fri Nov 25 14:16:43 2016 +0200
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Fri Nov 25 14:16:43 2016 +0200

----------------------------------------------------------------------
 .../server/checks/DatabaseConsistencyCheckHelper.java     | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8278384a/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
index c60bf20..9c3ae5c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
@@ -442,11 +442,13 @@ public class DatabaseConsistencyCheckHelper {
       connection = dbAccessor.getConnection();
     }
 
+    LOG.info("Getting ambari metainfo instance");
     if (ambariMetaInfo == null) {
       ambariMetaInfo = injector.getInstance(AmbariMetaInfo.class);
     }
 
     try {
+      LOG.info("Executing query 'GET_SERVICES_WITHOUT_CONFIGS'");
       statement = connection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE);
 
       rs = statement.executeQuery(GET_SERVICES_WITHOUT_CONFIGS_QUERY);
@@ -461,7 +463,7 @@ public class DatabaseConsistencyCheckHelper {
         }
 
       }
-
+      LOG.info("Executing query 'GET_SERVICE_CONFIG_WITHOUT_MAPPING'");
       rs = statement.executeQuery(GET_SERVICE_CONFIG_WITHOUT_MAPPING_QUERY);
       if (rs != null) {
         String serviceName = null, version = null, clusterName = null;
@@ -491,6 +493,7 @@ public class DatabaseConsistencyCheckHelper {
       }
 
       //get stack info from db
+      LOG.info("Getting stack info from database");
       rs = statement.executeQuery(GET_STACK_NAME_VERSION_QUERY);
       if (rs != null) {
         while (rs.next()) {
@@ -505,6 +508,7 @@ public class DatabaseConsistencyCheckHelper {
       Map<String, Map<Integer, Multimap<String, String>>> dbClusterServiceVersionConfigs = new HashMap<>();
       Multimap<String, String> stackServiceConfigs = HashMultimap.create();
 
+      LOG.info("Executing query 'GET_SERVICES_WITH_CONFIGS'");
       rs = statement.executeQuery(GET_SERVICES_WITH_CONFIGS_QUERY);
       if (rs != null) {
         String serviceName = null, configType = null, clusterName = null;
@@ -539,12 +543,14 @@ public class DatabaseConsistencyCheckHelper {
       }
 
       //compare service configs from stack with configs that we got from db
+      LOG.info("Comparing service configs from stack with configs that we got from db");
       for (Map.Entry<String, Map<String, String>> clusterStackInfoEntry : clusterStackInfo.entrySet()) {
         //collect required configs for all services from stack
         String clusterName = clusterStackInfoEntry.getKey();
         Map<String, String> stackInfo = clusterStackInfoEntry.getValue();
         String stackName = stackInfo.keySet().iterator().next();
         String stackVersion = stackInfo.get(stackName);
+        LOG.info("Getting services from metainfo");
         Map<String, ServiceInfo> serviceInfoMap = ambariMetaInfo.getServices(stackName, stackVersion);
         for (String serviceName : serviceNames) {
           LOG.info("Processing {}-{} / {}", stackName, stackVersion, serviceName);
@@ -562,6 +568,7 @@ public class DatabaseConsistencyCheckHelper {
         }
 
         //compare required service configs from stack with mapped service configs from db
+        LOG.info("Comparing required service configs from stack with mapped service configs from db");
         Map<Integer, Multimap<String, String>> dbServiceVersionConfigs = dbClusterServiceVersionConfigs.get(clusterName);
         if (dbServiceVersionConfigs != null) {
           for (Integer serviceVersion : dbServiceVersionConfigs.keySet()) {
@@ -585,6 +592,7 @@ public class DatabaseConsistencyCheckHelper {
       }
 
       //getting services which have mapped configs which are not selected in clusterconfigmapping
+      LOG.info("Getting services which have mapped configs which are not selected in clusterconfigmapping");
       rs = statement.executeQuery(GET_NOT_SELECTED_SERVICE_CONFIGS_QUERY);
       if (rs != null) {
         String serviceName = null, configType = null, clusterName = null;
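
The added LOG.info calls bracket each phase of the consistency check so a slow check-database run can be pinned to a specific query or metainfo lookup. A small sketch of the same idea as a reusable timing helper (an illustration in Python, not code from Ambari):

    import logging, time
    from contextlib import contextmanager

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger("db-check.sketch")

    @contextmanager
    def phase(name):
        """Log when a phase starts and how long it took, so slow steps stand out."""
        log.info("%s ...", name)
        start = time.time()
        try:
            yield
        finally:
            log.info("%s finished in %.1fs", name, time.time() - start)

    with phase("Executing query 'GET_SERVICES_WITHOUT_CONFIGS'"):
        time.sleep(0.1)  # stand-in for the real query
    with phase("Comparing service configs from stack with configs from db"):
        time.sleep(0.1)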


[47/50] ambari git commit: AMBARI-18755. Deployment failing at creating principal [addendum] (rlevas)

Posted by sw...@apache.org.
AMBARI-18755. Deployment failing at creating principal [addendum] (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a5ce8230
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a5ce8230
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a5ce8230

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: a5ce8230e59d515e6bcbe0db949ef6bfcd447c7a
Parents: 707f07f
Author: Robert Levas <rl...@hortonworks.com>
Authored: Wed Nov 30 19:57:28 2016 -0500
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Wed Nov 30 19:57:28 2016 -0500

----------------------------------------------------------------------
 ambari-server/docs/configuration/index.md       | 25 +++++++++++++-------
 .../server/configuration/Configuration.java     |  7 ++++++
 .../kerberos/MITKerberosOperationHandler.java   | 22 ++++++++++++-----
 .../ambari/server/utils/ShellCommandUtil.java   | 11 ++++++++-
 .../MITKerberosOperationHandlerTest.java        | 23 ++++++++++++++++++
 .../server/utils/TestShellCommandUtil.java      | 13 ++++++----
 6 files changed, 81 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a5ce8230/ambari-server/docs/configuration/index.md
----------------------------------------------------------------------
diff --git a/ambari-server/docs/configuration/index.md b/ambari-server/docs/configuration/index.md
index 77d3a4d..34dafc2 100644
--- a/ambari-server/docs/configuration/index.md
+++ b/ambari-server/docs/configuration/index.md
@@ -49,15 +49,17 @@ The following are the properties which can be used to configure Ambari.
 | agent.stack.retry.tries | The number of times an Ambari Agent should retry package installation when it fails due to a repository error. <br/><br/> This property is related to `agent.stack.retry.on_repo_unavailability`. |`5` | 
 | agent.task.timeout | The time, in seconds, before agent commands are killed. This does not include package installation commands. |`900` | 
 | agent.threadpool.size.max | The size of the Jetty connection pool used for handling incoming Ambari Agent requests. |`25` | 
+| alerts.ambari.snmp.dispatcher.udp.port | The UDP port to use when binding the Ambari SNMP dispatcher on Ambari Server startup. If no port is specified, then a random port will be used. | | 
 | alerts.cache.enabled | Determines whether current alerts should be cached. Enabling this can increase performance on large cluster, but can also result in lost alert data if the cache is not flushed frequently. |`false` | 
 | alerts.cache.flush.interval | The time, in minutes, after which cached alert information is flushed to the database<br/><br/> This property is related to `alerts.cache.enabled`. |`10` | 
 | alerts.cache.size | The size of the alert cache.<br/><br/> This property is related to `alerts.cache.enabled`. |`50000` | 
 | alerts.execution.scheduler.maxThreads | The number of threads used to handle alerts received from the Ambari Agents. The value should be increased as the size of the cluster increases. |`2` | 
 | alerts.snmp.dispatcher.udp.port | The UDP port to use when binding the SNMP dispatcher on Ambari Server startup. If no port is specified, then a random port will be used. | | 
-| alerts.ambari.snmp.dispatcher.udp.port | The UDP port to use when binding the SNMP dispatcher on Ambari Server startup. If no port is specified, then a random port will be used. | |
-| alerts.template.file | The full path to the XML file that describes the different alert templates. | |
+| alerts.template.file | The full path to the XML file that describes the different alert templates. | | 
 | ambari.display.url | The URL to use when creating messages which should include the Ambari Server URL.<br/><br/>The following are examples of valid values:<ul><li>`http://ambari.apache.org:8080`</ul> | | 
 | ambari.ldap.isConfigured | An internal property used for unit testing and development purposes. |`false` | 
+| ambari.post.user.creation.hook | The location of the post user creation hook on the Ambari Server host machine. |`/var/lib/ambari-server/resources/scripts/post-user-creation-hook.sh` | 
+| ambari.post.user.creation.hook.enabled | Indicates whether the post user creation hook is enabled or not. It is disabled by default. |`false` | 
 | ambari.python.wrap | The name of the shell script used to wrap all invocations of Python by Ambari.  |`ambari-python-wrap` | 
 | anonymous.audit.name | The name of the user given to requests which are executed without any credentials. |`_anonymous` | 
 | api.authenticated.user | The username of the default user assumed to be executing API calls. When set, authentication is not required in order to login to Ambari or use the REST APIs.   | | 
@@ -105,6 +107,7 @@ The following are the properties which can be used to configure Ambari.
 | authentication.ldap.usernameAttribute | The attribute used for determining the user name, such as `uid`. |`uid` | 
 | authorization.ldap.adminGroupMappingRules | A comma-separate list of groups which would give a user administrative access to Ambari when syncing from LDAP. This is only used when `authorization.ldap.groupSearchFilter` is blank.<br/><br/>The following are examples of valid values:<ul><li>`administrators`<li>`Hadoop Admins,Hadoop Admins.*,DC Admins,.*Hadoop Operators`</ul> |`Ambari Administrators` | 
 | authorization.ldap.groupSearchFilter | The DN to use when searching for LDAP groups. | | 
+| auto.group.creation | The auto group creation by Ambari |`false` | 
 | bootstrap.dir | The directory on the Ambari Server file system used for storing Ambari Agent bootstrap information such as request responses. |`/var/run/ambari-server/bootstrap` | 
 | bootstrap.master_host_name | The host name of the Ambari Server which will be used by the Ambari Agents for communication. | | 
 | bootstrap.script | The location and name of the Python script used to bootstrap new Ambari Agent hosts. |`/usr/lib/python2.6/site-packages/ambari_server/bootstrap.py` | 
@@ -138,6 +141,7 @@ The following are the properties which can be used to configure Ambari.
 | kerberos.check.jaas.configuration | Determines whether Kerberos-enabled Ambari deployments should use JAAS to validate login credentials. |`false` | 
 | kerberos.keytab.cache.dir | The location on the Ambari Server where Kerberos keytabs are cached. |`/var/lib/ambari-server/data/cache` | 
 | kerberos.operation.retries | The number of times failed kerberos operations should be retried to execute. |`3` | 
+| kerberos.operation.retry.timeout | The time to wait (in seconds) between retries of failed kerberos operations. |`10` | 
 | ldap.sync.username.collision.behavior | Determines how to handle username collision while updating from LDAP.<br/><br/>The following are examples of valid values:<ul><li>`skip`<li>`convert`</ul> |`convert` | 
 | log4j.monitor.delay | Indicates the delay, in milliseconds, for the log4j monitor to check for changes |`300000` | 
 | metadata.path | The location on the Ambari Server where the stack resources exist.<br/><br/>The following are examples of valid values:<ul><li>`/var/lib/ambari-server/resources/stacks`</ul> | | 
@@ -146,8 +150,10 @@ The following are the properties which can be used to configure Ambari.
 | metrics.retrieval-service.request.ttl.enabled | Enables throttling requests to the same endpoint within a fixed amount of time. This property will prevent Ambari from making new metric requests to update the cache for URLs which have been recently retrieved.<br/><br/> This property is related to `metrics.retrieval-service.request.ttl`. |`true` | 
 | mpacks.staging.path | The Ambari Management Pack staging directory on the Ambari Server.<br/><br/>The following are examples of valid values:<ul><li>`/var/lib/ambari-server/resources/mpacks`</ul> | | 
 | packages.pre.installed | Determines whether Ambari Agent instances have already have the necessary stack software installed |`false` | 
+| pam.configuration | The PAM configuration file. | | 
 | proxy.allowed.hostports | A comma-separated whitelist of host and port values which Ambari Server can use to determine if a proxy value is valid. |`*:*` | 
 | recommendations.artifacts.lifetime | The amount of time that Recommendation API data is kept on the Ambari Server file system. This is specified using a `hdwmy` syntax for pairing the value with a time unit (hours, days, weeks, months, years)<br/><br/>The following are examples of valid values:<ul><li>`8h`<li>`2w`<li>`1m`</ul> |`1w` | 
+| recommendations.artifacts.rollover.max | Maximum number of recommendations artifacts at a given time<br/><br/>The following are examples of valid values:<ul><li>`50`<li>`10`<li>`100`</ul> |`100` | 
 | recommendations.dir | The directory on the Ambari Server file system used for storing Recommendation API artifacts. |`/var/run/ambari-server/stack-recommendations` | 
 | recovery.disabled_components | A comma-separated list of component names which are not included in automatic recovery attempts.<br/><br/>The following are examples of valid values:<ul><li>`NAMENODE,ZOOKEEPER_SERVER`</ul> | | 
 | recovery.enabled_components | A comma-separated list of component names which are included in automatic recovery attempts.<br/><br/>The following are examples of valid values:<ul><li>`NAMENODE,ZOOKEEPER_SERVER`</ul> | | 
@@ -222,16 +228,16 @@ The following are the properties which can be used to configure Ambari.
 | server.jdbc.user.passwd | The password for the user when logging into the database. |`bigdata` | 
 | server.locks.profiling | Enable the profiling of internal locks. |`false` | 
 | server.metrics.retrieval-service.thread.priority | The priority of threads used by the service which retrieves JMX and REST metrics directly from their respective endpoints. |`5` | 
-| server.metrics.retrieval-service.threadpool.size.core | The core number of threads used to retrieve JMX and REST metrics directly from their respective endpoints. |`16` | 
-| server.metrics.retrieval-service.threadpool.size.max | The maximum number of threads used to retrieve JMX and REST metrics directly from their respective endpoints. |`32` | 
-| server.metrics.retrieval-service.threadpool.worker.size | The number of queued requests allowed for JMX and REST metrics before discarding old requests which have not been fullfilled. |`320` | 
+| server.metrics.retrieval-service.threadpool.size.core | The core number of threads used to retrieve JMX and REST metrics directly from their respective endpoints. |`8` | 
+| server.metrics.retrieval-service.threadpool.size.max | The maximum number of threads used to retrieve JMX and REST metrics directly from their respective endpoints. |`16` | 
+| server.metrics.retrieval-service.threadpool.worker.size | The number of queued requests allowed for JMX and REST metrics before discarding old requests which have not been fullfilled. |`160` | 
 | server.operations.retry-attempts | The number of retry attempts for failed API and blueprint operations. |`0` | 
 | server.os_family | The operating system family for all hosts in the cluster. This is used when bootstrapping agents and when enabling Kerberos.<br/><br/>The following are examples of valid values:<ul><li>`redhat`<li>`ubuntu`</ul> | | 
 | server.os_type | The operating system version for all hosts in the cluster. This is used when bootstrapping agents and when enabling Kerberos.<br/><br/>The following are examples of valid values:<ul><li>`6`<li>`7`</ul> | | 
 | server.persistence.type | The type of database connection being used. Unless using an embedded PostgresSQL server, then this should be `remote`.<br/><br/>The following are examples of valid values:<ul><li>`local`<li>`remote`</ul> |`local` | 
 | server.property-provider.threadpool.completion.timeout | The maximum time, in milliseconds, that federated requests for data can execute before being terminated. Increasing this value could result in degraded performanc from the REST APIs. |`5000` | 
-| server.property-provider.threadpool.size.core | The core number of threads that will be used to retrieve data from federated datasources, such as remote JMX endpoints. |`16` | 
-| server.property-provider.threadpool.size.max | The maximum number of threads that will be used to retrieve data from federated datasources, such as remote JMX endpoints. |`32` | 
+| server.property-provider.threadpool.size.core | The core number of threads that will be used to retrieve data from federated datasources, such as remote JMX endpoints. |`8` | 
+| server.property-provider.threadpool.size.max | The maximum number of threads that will be used to retrieve data from federated datasources, such as remote JMX endpoints. |`16` | 
 | server.property-provider.threadpool.worker.size | The maximum size of pending federated datasource requests, such as those to JMX endpoints, which can be queued before rejecting new requests. |`2147483647` | 
 | server.script.timeout | The time, in milliseconds, until an external script is killed. |`5000` | 
 | server.stage.command.execution_type | How to execute commands in one stage |`STAGE` | 
@@ -262,6 +268,9 @@ The following are the properties which can be used to configure Ambari.
 | stack.upgrade.bypass.prechecks | Determines whether pre-upgrade checks will be skipped when performing a rolling or express stack upgrade. |`false` | 
 | stackadvisor.script | The location and name of the Python stack advisor script executed when configuring services. |`/var/lib/ambari-server/resources/scripts/stack_advisor.py` | 
 | task.query.parameterlist.size | The maximum number of tasks which can be queried by ID from the database. |`999` | 
+| topology.task.creation.parallel | Indicates whether parallel topology task creation is enabled |`false` | 
+| topology.task.creation.parallel.threads | The number of threads to use for parallel topology task creation if enabled |`10` | 
+| view.extract-after-cluster-config | Drives view extraction in case of blueprint deployments; non-system views are deployed when cluster configuration is successful |`false` | 
 | view.extraction.threadpool.size.core | The number of threads used to extract Ambari Views when Ambari Server is starting up. |`10` | 
 | view.extraction.threadpool.size.max | The maximum number of threads used to extract Ambari Views when Ambari Server is starting up. |`20` | 
 | view.extraction.threadpool.timeout | The time, in milliseconds, that non-core threads will live when extraction views on Ambari Server startup. |`100000` | 
@@ -316,4 +325,4 @@ EclipseLink properties can also be configured using a prefix of `server.persiste
 ```
 server.persistence.properties.eclipselink.jdbc.batch-writing.size=25
 server.persistence.properties.eclipselink.profiler=QueryMonitor
-```
+```
\ No newline at end of file
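
As documented in the table above, the retry behaviour for failed kadmin calls is now tunable. For example, an operator could raise both settings in ambari.properties (illustrative values only; the shipped defaults remain 3 retries and a 10 second wait):

    kerberos.operation.retries=5
    kerberos.operation.retry.timeout=30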

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5ce8230/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index ef35ce8..8f10023 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -1407,6 +1407,9 @@ public class Configuration {
   public static final ConfigurationProperty<Integer> KERBEROS_OPERATION_RETRIES = new ConfigurationProperty<>(
       "kerberos.operation.retries", 3);
 
+  @Markdown(description = "The time to wait (in seconds) between retries of failed kerberos operations.")
+  public static final ConfigurationProperty<Integer> KERBEROS_OPERATION_RETRY_TIMEOUT = new ConfigurationProperty<>(
+      "kerberos.operation.retry.timeout", 10);
   /**
    * The type of connection pool to use with JDBC connections to the database.
    */
@@ -5611,6 +5614,10 @@ public class Configuration {
     return Integer.valueOf(getProperty(KERBEROS_OPERATION_RETRIES));
   }
 
+  public int getKerberosOperationRetryTimeout() {
+    return Integer.valueOf(getProperty(KERBEROS_OPERATION_RETRY_TIMEOUT));
+  }
+
   /**
    * Return configured acceptors for agent api connector. Default = null
    */

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5ce8230/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java
index 2f756fb..4b20973 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java
@@ -473,9 +473,11 @@ public class MITKerberosOperationHandler extends KerberosOperationHandler {
       }
       tries++;
 
-      try { Thread.sleep(3000); } catch (InterruptedException e) {}
+      try { Thread.sleep(1000 * configuration.getKerberosOperationRetryTimeout()); } catch (InterruptedException e) {}
 
-      String message = String.format("Retrying to execute kadmin after a wait of 3 seconds :\n\tCommand: %s", command);
+      String message = String.format("Retrying to execute kadmin after a wait of %d seconds :\n\tCommand: %s",
+          configuration.getKerberosOperationRetryTimeout(),
+          command);
       LOG.warn(message);
     }
 
@@ -521,8 +523,8 @@ public class MITKerberosOperationHandler extends KerberosOperationHandler {
     /**
      * The queue of responses to return
      */
-    private final Queue<String> responses = new LinkedList<String>();
-
+    private LinkedList<String> responses;
+    private Queue<String> currentResponses;
 
     /**
      * Constructor.
@@ -531,6 +533,7 @@ public class MITKerberosOperationHandler extends KerberosOperationHandler {
      * @param userPassword  the user's password (optional)
      */
     public InteractivePasswordHandler(String adminPassword, String userPassword) {
+      responses = new LinkedList<String>();
 
       if (adminPassword != null) {
         responses.offer(adminPassword);
@@ -540,16 +543,23 @@ public class MITKerberosOperationHandler extends KerberosOperationHandler {
         responses.offer(userPassword);
         responses.offer(userPassword);  // Add a 2nd time for the password "confirmation" request
       }
+
+      currentResponses = new LinkedList<String>(responses);
     }
 
     @Override
     public boolean done() {
-      return responses.size() == 0;
+      return currentResponses.size() == 0;
     }
 
     @Override
     public String getResponse(String query) {
-      return responses.poll();
+      return currentResponses.poll();
+    }
+
+    @Override
+    public void start() {
+      currentResponses = new LinkedList<String>(responses);
     }
   }
 }
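
The split into a seed list (responses) and a per-run queue (currentResponses) exists so that the same InteractivePasswordHandler can be driven again when kadmin is retried; before this addendum the queue was emptied by the first attempt. A minimal sketch of the lifecycle, mirroring the unit test added further below (the prompt text is illustrative):

    MITKerberosOperationHandler.InteractivePasswordHandler handler =
        new MITKerberosOperationHandler.InteractivePasswordHandler("admin_password", "user_password");

    for (int attempt = 1; attempt <= 2; attempt++) {  // e.g. a failed kadmin call and its retry
      handler.start();                                // re-seeds currentResponses from the seed list
      while (!handler.done()) {
        // answers, in order: admin password, user password, user password (confirmation prompt)
        System.out.println(handler.getResponse("Password:"));
      }
    }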

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5ce8230/ambari-server/src/main/java/org/apache/ambari/server/utils/ShellCommandUtil.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/utils/ShellCommandUtil.java b/ambari-server/src/main/java/org/apache/ambari/server/utils/ShellCommandUtil.java
index 99f47c5..3152a0c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/utils/ShellCommandUtil.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/utils/ShellCommandUtil.java
@@ -434,6 +434,8 @@ public class ShellCommandUtil {
       BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(process.getOutputStream()));
       BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
 
+      interactiveHandler.start();
+
       while (!interactiveHandler.done()) {
         StringBuilder query = new StringBuilder();
 
@@ -546,11 +548,18 @@ public class ShellCommandUtil {
     boolean done();
 
     /**
-     * Gnven a query, returns the relative response to send to the shell command (via stdin)
+     * Given a query, returns the relative response to send to the shell command (via stdin)
      *
      * @param query a string containing the query that needs a response
      * @return a string or null if no response is needed
      */
     String getResponse(String query);
+
+    /**
+     * Starts or resets this handler.
+     * <p>
+     * It is expected that the caller calls this before using the handler.
+     */
+    void start();
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5ce8230/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java
index 4c40a5d..39f4201 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java
@@ -70,6 +70,7 @@ public class MITKerberosOperationHandlerTest extends KerberosOperationHandlerTes
       protected void configure() {
         Configuration configuration = EasyMock.createNiceMock(Configuration.class);
         expect(configuration.getServerOsFamily()).andReturn("redhat6").anyTimes();
+        expect(configuration.getKerberosOperationRetryTimeout()).andReturn(1).anyTimes();
         replay(configuration);
 
         bind(Clusters.class).toInstance(EasyMock.createNiceMock(Clusters.class));
@@ -547,6 +548,28 @@ public class MITKerberosOperationHandlerTest extends KerberosOperationHandlerTes
     handler.close();
   }
 
+  @Test
+  public void testInteractivePasswordHandler() {
+    MITKerberosOperationHandler.InteractivePasswordHandler handler = new MITKerberosOperationHandler.InteractivePasswordHandler("admin_password", "user_password");
+
+    handler.start();
+    Assert.assertEquals("admin_password", handler.getResponse("password"));
+    Assert.assertFalse(handler.done());
+    Assert.assertEquals("user_password", handler.getResponse("password"));
+    Assert.assertFalse(handler.done());
+    Assert.assertEquals("user_password", handler.getResponse("password"));
+    Assert.assertTrue(handler.done());
+
+    // Test restarting
+    handler.start();
+    Assert.assertEquals("admin_password", handler.getResponse("password"));
+    Assert.assertFalse(handler.done());
+    Assert.assertEquals("user_password", handler.getResponse("password"));
+    Assert.assertFalse(handler.done());
+    Assert.assertEquals("user_password", handler.getResponse("password"));
+    Assert.assertTrue(handler.done());
+  }
+
   private MITKerberosOperationHandler createMock(){
     return createMock(false);
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a5ce8230/ambari-server/src/test/java/org/apache/ambari/server/utils/TestShellCommandUtil.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/utils/TestShellCommandUtil.java b/ambari-server/src/test/java/org/apache/ambari/server/utils/TestShellCommandUtil.java
index 34ac91f..24af9bd 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/utils/TestShellCommandUtil.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/utils/TestShellCommandUtil.java
@@ -126,17 +126,20 @@ public class TestShellCommandUtil {
 
       @Override
       public String getResponse(String query) {
-        if(query.contains("Arg1")) {
+        if (query.contains("Arg1")) {
           return "a1";
-        }
-        else if(query.contains("Arg2")) {
+        } else if (query.contains("Arg2")) {
           done = true; // this is the last expected prompt
           return "a2";
-        }
-        else {
+        } else {
           return null;
         }
       }
+
+      @Override
+      public void start() {
+
+      }
     };
 
     ShellCommandUtil.Result result = ShellCommandUtil.runCommand(new String[]{"./src/test/resources/interactive_shell_test.sh"}, null, interactiveHandler, false);


[21/50] ambari git commit: AMBARI-19001. Support configurable grok filters (output + ambari) (oleewere)

Posted by sw...@apache.org.
AMBARI-19001. Support configurable grok filters (output + ambari) (oleewere)

Change-Id: Ibf02fc6ca6759435ac14dc59de9f71782d7ea709


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c295941b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c295941b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c295941b

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: c295941b7259b0deb1e452522ceeca2d3e9d7d4a
Parents: 1fca37d
Author: oleewere <ol...@gmail.com>
Authored: Tue Nov 29 13:46:32 2016 +0100
Committer: oleewere <ol...@gmail.com>
Committed: Tue Nov 29 15:54:26 2016 +0100

----------------------------------------------------------------------
 .../configuration/logfeeder-ambari-config.xml   |  37 ++
 .../configuration/logfeeder-output-config.xml   |  37 ++
 .../LOGSEARCH/0.5.0/metainfo.xml                |   5 +
 .../LOGSEARCH/0.5.0/package/scripts/params.py   |  17 +-
 .../0.5.0/package/scripts/setup_logfeeder.py    |  10 +
 .../templates/input.config-ambari.json.j2       | 602 -------------------
 .../package/templates/output.config.json.j2     |  61 --
 .../properties/input.config-ambari.json.j2      | 602 +++++++++++++++++++
 .../0.5.0/properties/output.config.json.j2      |  61 ++
 .../stacks/2.4/LOGSEARCH/test_logfeeder.py      |  14 +-
 .../test/python/stacks/2.4/configs/default.json |   6 +
 11 files changed, 780 insertions(+), 672 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c295941b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-ambari-config.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-ambari-config.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-ambari-config.xml
new file mode 100644
index 0000000..64d7946
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-ambari-config.xml
@@ -0,0 +1,37 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>content</name>
+    <display-name>Ambari LogFeeder config</display-name>
+    <description>Grok filters and input file locations for ambari related log files</description>
+    <value/>
+    <property-type>VALUE_FROM_PROPERTY_FILE</property-type>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+      <property-file-name>input.config-ambari.json.j2</property-file-name>
+      <property-file-type>text</property-file-type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c295941b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-output-config.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-output-config.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-output-config.xml
new file mode 100644
index 0000000..85654c8
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-output-config.xml
@@ -0,0 +1,37 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>content</name>
+    <display-name>LogFeeder output config</display-name>
+    <description>Output configuration for LogFeeder (data shipping)</description>
+    <value/>
+    <property-type>VALUE_FROM_PROPERTY_FILE</property-type>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+      <property-file-name>output.config.json.j2</property-file-name>
+      <property-file-type>text</property-file-type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c295941b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metainfo.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metainfo.xml
index 6d1cf38..f69bd3c 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metainfo.xml
@@ -72,6 +72,7 @@
             <config-type>logsearch-admin-json</config-type>
             <config-type>logsearch-service_logs-solrconfig.xml</config-type>
             <config-type>logsearch-audit_logs-solrconfig.xml</config-type>
+            <config-type>logfeeder-custom-logsearch-config</config-type>
           </configuration-dependencies>
         </component>
 
@@ -111,6 +112,10 @@
             <config-type>logfeeder-env</config-type>
             <config-type>logfeeder-grok</config-type>
             <config-type>logfeeder-log4j</config-type>
+            <config-type>logfeeder-system_log-env</config-type>
+            <config-type>logfeeder-ambari-config</config-type>
+            <config-type>logfeeder-output-config</config-type>
+            <config-type>logfeeder-custom-logsearch-config</config-type>
           </configuration-dependencies>
         </component>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c295941b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
index bdb0c10..d086f9f 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
@@ -251,16 +251,19 @@ logfeeder_truststore_location = config['configurations']['logfeeder-env']['logfe
 logfeeder_truststore_password = config['configurations']['logfeeder-env']['logfeeder_truststore_password']
 logfeeder_truststore_type = config['configurations']['logfeeder-env']['logfeeder_truststore_type']
 
-logfeeder_default_services = ['ambari', 'logsearch']
-logfeeder_default_config_file_names = ['global.config.json', 'output.config.json'] + ['input.config-%s.json' % (tag) for tag in logfeeder_default_services]
+logfeeder_ambari_config_content = config['configurations']['logfeeder-ambari-config']['content']
+logfeeder_output_config_content = config['configurations']['logfeeder-output-config']['content']
+
+logfeeder_default_services = ['logsearch']
+logfeeder_default_config_file_names = ['global.config.json'] + ['input.config-%s.json' % (tag) for tag in logfeeder_default_services]
 logfeeder_custom_config_file_names = ['input.config-%s.json' % (tag.replace('-logsearch-conf', ''))
                                       for tag, content in logfeeder_metadata.iteritems() if any(logfeeder_metadata)]
 
 if logfeeder_system_log_enabled:
-  default_config_files = ','.join(logfeeder_default_config_file_names + logfeeder_custom_config_file_names
+  default_config_files = ','.join(['output.json','input.config-ambari.json'] + logfeeder_default_config_file_names + logfeeder_custom_config_file_names
                                   + ['input.config-system_messages.json', 'input.config-secure_log.json'])
 else:
-  default_config_files = ','.join(logfeeder_default_config_file_names + logfeeder_custom_config_file_names)
+  default_config_files = ','.join(['output.config.json','input.config-ambari.json'] + logfeeder_default_config_file_names + logfeeder_custom_config_file_names)
 
 
 logfeeder_grok_patterns = config['configurations']['logfeeder-grok']['default_grok_patterns']
@@ -293,8 +296,10 @@ logfeeder_properties['logfeeder.config.files'] = format(logfeeder_properties['lo
 logfeeder_properties['logfeeder.solr.zk_connect_string'] = zookeeper_quorum + infra_solr_znode
 
 if security_enabled:
-  logfeeder_properties['logfeeder.solr.kerberos.enable'] = 'true'
-  logfeeder_properties['logfeeder.solr.jaas.file'] = logfeeder_jaas_file
+  if 'logfeeder.solr.kerberos.enable' not in logfeeder_properties:
+    logfeeder_properties['logfeeder.solr.kerberos.enable'] = 'true'
+  if 'logfeeder.solr.jaas.file' not in logfeeder_properties:
+    logfeeder_properties['logfeeder.solr.jaas.file'] = logfeeder_jaas_file
 
 logfeeder_checkpoint_folder = logfeeder_properties['logfeeder.checkpoint.folder']
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c295941b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py
index 021c167..9582334 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py
@@ -62,6 +62,16 @@ def setup_logfeeder():
        encoding="utf-8"
        )
 
+  File(format("{logsearch_logfeeder_conf}/input.config-ambari.json"),
+       content=InlineTemplate(params.logfeeder_ambari_config_content),
+       encoding="utf-8"
+       )
+
+  File(format("{logsearch_logfeeder_conf}/output.config.json"),
+       content=InlineTemplate(params.logfeeder_output_config_content),
+       encoding="utf-8"
+       )
+
   for file_name in params.logfeeder_default_config_file_names:
     File(format("{logsearch_logfeeder_conf}/" + file_name),
          content=Template(file_name + ".j2")

http://git-wip-us.apache.org/repos/asf/ambari/blob/c295941b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ambari.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ambari.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ambari.json.j2
deleted file mode 100644
index ad4adb2..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ambari.json.j2
+++ /dev/null
@@ -1,602 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-  "input":[
-    {
-      "type":"ambari_agent",
-      "rowtype":"service",
-      "path":"{{ambari_agent_log_dir}}/ambari-agent.log"
-    },
-    {
-      "type":"ambari_server",
-      "rowtype":"service",
-      "path":"{{ambari_server_log_dir}}/ambari-server.log"
-    },
-    {
-      "type":"ambari_alerts",
-      "rowtype":"service",
-      "add_fields":{
-        "level":"UNKNOWN"
-      },
-      "path":"{{ambari_server_log_dir}}/ambari-alerts.log"
-    },
-    {
-      "type":"ambari_config_changes",
-      "rowtype":"service",
-      "path":"{{ambari_server_log_dir}}/ambari-config-changes.log"
-    },
-    {
-      "type":"ambari_eclipselink",
-      "rowtype":"service",
-      "path":"{{ambari_server_log_dir}}/ambari-eclipselink.log"
-    },
-    {
-      "type":"ambari_server_check_database",
-      "rowtype":"service",
-      "path":"{{ambari_server_log_dir}}/ambari-server-check-database.log"
-    },
-    {
-      "type":"ambari_audit",
-      "rowtype":"audit",
-      "add_fields":{
-        "logType":"AmbariAudit",
-        "enforcer":"ambari-acl",
-        "repoType":"1",
-        "repo":"ambari",
-        "level":"INFO"
-      },
-      "path":"{{ambari_server_log_dir}}/ambari-audit.log"
-    }
-
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ambari_agent"
-          ]
-
-        }
-
-      },
-      "log4j_format":"",
-      "multiline_pattern":"^(%{LOGLEVEL:level} %{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{LOGLEVEL:level} %{TIMESTAMP_ISO8601:logtime} %{JAVAFILE:file}:%{INT:line_number} - %{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        },
-        "level":{
-          "map_fieldvalue":{
-            "pre_value":"WARNING",
-            "post_value":"WARN"
-          }
-
-        }
-
-      }
-
-    },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ambari_server"
-          ]
-          
-        }
-        
-      },
-      "log4j_format":"%d{DATE} %5p [%t] %c{1}:%L - %m%n",
-      "multiline_pattern":"^(%{USER_SYNC_DATE:logtime})",
-      "message_pattern":"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"dd MMM yyyy HH:mm:ss"
-          }
-
-        }
-
-      }
-
-    },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ambari_alerts"
-          ]
-          
-        }
-        
-      },
-      "log4j_format":"%d{DATE} %5p [%t] %c{1}:%L - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ambari_config_changes"
-          ]
-          
-        }
-        
-      },
-      "log4j_format":"%d{DATE} %5p [%t] %c{1}:%L - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ambari_eclipselink"
-          ]
-          
-        }
-        
-      },
-      "log4j_format":"%d{DATE} %5p [%t] %c{1}:%L - %m%n",
-      "multiline_pattern":"^(\\[EL%{SPACE}%{LOGLEVEL:level}\\])",
-      "message_pattern":"(?m)^\\[EL%{SPACE}%{LOGLEVEL:level}\\]:%{SPACE}%{TIMESTAMP_ISO8601:logtime}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss.SSS"
-          }
-
-        },
-        "level":{
-          "map_fieldvalue":{
-            "pre_value":"Warning",
-            "post_value":"Warn"
-          }
-
-        }
-
-      }
-
-    },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ambari_server_check_database"
-          ]
-          
-        }
-        
-      },
-      "log4j_format":"%d{DATE} %5p [%t] %c{1}:%L - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ambari_audit"
-          ]
-
-        }
-
-      },
-      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:evtTime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:evtTime},%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "evtTime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd'T'HH:mm:ss.SSSXX"
-          }
-
-        }
-
-      }
-
-    },
-    {
-      "filter":"keyvalue",
-      "sort_order":1,
-      "conditions":{
-        "fields":{
-          "type":[
-            "ambari_audit"
-          ]
-
-        }
-
-      },
-      "source_field":"log_message",
-      "field_split":", ",
-      "value_borders":"()",
-      "post_map_values":{
-        "User":{
-          "map_fieldvalue":{
-            "pre_value":"null",
-            "post_value":"unknown"
-          },
-          "map_fieldname":{
-            "new_fieldname":"reqUser"
-          }
-        },
-        "Hostname":{
-          "map_fieldname":{
-            "new_fieldname":"host"
-          }
-        },
-        "Host name":{
-          "map_fieldname":{
-            "new_fieldname":"host"
-          }
-        },
-        "RemoteIp":{
-          "map_fieldname":{
-            "new_fieldname":"cliIP"
-          }
-        },
-        "RequestType":{
-          "map_fieldname":{
-            "new_fieldname":"cliType"
-          }
-        },
-        "RequestId":{
-          "map_fieldname":{
-            "new_fieldname":"request_id"
-          }
-        },
-        "TaskId":{
-          "map_fieldname":{
-            "new_fieldname":"task_id"
-          }
-        },
-        "Operation":{
-          "map_fieldname":{
-            "new_fieldname":"action"
-          }
-        },
-        "url":{
-          "map_fieldname":{
-            "new_fieldname":"resource"
-          }
-        },
-        "ResourcePath":{
-          "map_fieldname":{
-            "new_fieldname":"resource"
-          }
-        },
-        "Cluster name":{
-          "map_fieldname":{
-            "new_fieldname":"cluster"
-          }
-        },
-        "Reason":{
-          "map_fieldname":{
-            "new_fieldname":"reason"
-          }
-        },
-        "Base URL":{
-          "map_fieldname":{
-            "new_fieldname":"ws_base_url"
-          }
-        },
-        "Command":{
-          "map_fieldvalue":{
-            "pre_value":"null",
-            "post_value":"unknown"
-          },
-          "map_fieldname":{
-            "new_fieldname":"ws_command"
-          }
-        },
-        "Component":{
-          "map_fieldname":{
-            "new_fieldname":"ws_component"
-          }
-        },
-        "Details":{
-          "map_fieldname":{
-            "new_fieldname":"ws_details"
-          }
-        },
-        "Display name":{
-          "map_fieldvalue":{
-            "pre_value":"null",
-            "post_value":"unknown"
-          },
-          "map_fieldname":{
-            "new_fieldname":"ws_display_name"
-          }
-        },
-        "OS":{
-          "map_fieldname":{
-            "new_fieldname":"ws_os"
-          }
-        },
-        "Repo id":{
-          "map_fieldname":{
-            "new_fieldname":"ws_repo_id"
-          }
-        },
-        "Repo version":{
-          "map_fieldvalue":{
-            "pre_value":"null",
-            "post_value":"unknown"
-          },
-          "map_fieldname":{
-            "new_fieldname":"ws_repo_version"
-          }
-        },
-        "Repositories":{
-          "map_fieldname":{
-            "new_fieldname":"ws_repositories"
-          }
-        },
-        "Roles":{
-          "map_fieldname":{
-            "new_fieldname":"ws_roles"
-          }
-        },
-        "Stack":{
-          "map_fieldname":{
-            "new_fieldname":"ws_stack"
-          }
-        },
-        "Stack version":{
-          "map_fieldname":{
-            "new_fieldname":"ws_stack_version"
-          }
-        },
-        "VersionNote":{
-          "map_fieldvalue":{
-            "pre_value":"null",
-            "post_value":"unknown"
-          },
-          "map_fieldname":{
-            "new_fieldname":"ws_version_note"
-          }
-        },
-        "VersionNumber":{
-          "map_fieldvalue":{
-            "pre_value":"Vnull",
-            "post_value":"unknown"
-          },
-          "map_fieldname":{
-            "new_fieldname":"ws_version_number"
-          }
-        },
-        "Status":[
-          {
-            "map_fieldcopy":{
-              "copy_name": "ws_status"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"Success",
-              "post_value":"1"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"Successfully queued",
-              "post_value":"1"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"QUEUED",
-              "post_value":"1"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"PENDING",
-              "post_value":"1"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"COMPLETED",
-              "post_value":"1"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"IN_PROGRESS",
-              "post_value":"1"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"Failed",
-              "post_value":"0"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"Failed to queue",
-              "post_value":"0"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"HOLDING",
-              "post_value":"0"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"HOLDING_FAILED",
-              "post_value":"0"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"HOLDING_TIMEDOUT",
-              "post_value":"0"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"FAILED",
-              "post_value":"0"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"TIMEDOUT",
-              "post_value":"0"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"ABORTED",
-              "post_value":"0"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"SKIPPED_FAILED",
-              "post_value":"0"
-            }
-          },
-          {
-            "map_fieldname":{
-              "new_fieldname":"result"
-            }
-          }
-        ],
-        "ResultStatus":[
-          {
-            "map_fieldcopy":{
-              "copy_name": "ws_result_status"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"200 OK",
-              "post_value":"1"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"201 Created",
-              "post_value":"1"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"202 Accepted",
-              "post_value":"1"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"400 Bad Request",
-              "post_value":"0"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"401 Unauthorized",
-              "post_value":"0"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"403 Forbidden",
-              "post_value":"0"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"404 Not Found",
-              "post_value":"0"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"409 Resource Conflict",
-              "post_value":"0"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"500 Internal Server Error",
-              "post_value":"0"
-            }
-          },
-          {
-            "map_fieldname":{
-              "new_fieldname":"result"
-            }
-          }
-        ]
-
-      }
-
-    }
-
-  ]
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c295941b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/output.config.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/output.config.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/output.config.json.j2
deleted file mode 100644
index ff43323..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/output.config.json.j2
+++ /dev/null
@@ -1,61 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-  "output":[
-    {
-      "comment":"Output to solr for service logs",
-      "is_enabled":"{{solr_service_logs_enable}}",
-      "destination":"solr",
-      "zk_connect_string":"{{zookeeper_quorum}}{{infra_solr_znode}}",
-      "collection":"{{logsearch_solr_collection_service_logs}}",
-      "number_of_shards": "{{logsearch_collection_service_logs_numshards}}",
-      "splits_interval_mins": "{{logsearch_service_logs_split_interval_mins}}",
-      "conditions":{
-        "fields":{
-          "rowtype":[
-            "service"
-          ]
-
-        }
-
-      }
-
-    },
-    {
-      "comment":"Output to solr for audit records",
-      "is_enabled":"{{solr_audit_logs_enable}}",
-      "destination":"solr",
-      "zk_connect_string":"{{zookeeper_quorum}}{{infra_solr_znode}}",
-      "collection":"{{logsearch_solr_collection_audit_logs}}",
-      "number_of_shards": "{{logsearch_collection_audit_logs_numshards}}",
-      "splits_interval_mins": "{{logsearch_audit_logs_split_interval_mins}}",
-      "conditions":{
-        "fields":{
-          "rowtype":[
-            "audit"
-          ]
-
-        }
-
-      }
-
-    }
-
-  ]
-  
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c295941b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/input.config-ambari.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/input.config-ambari.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/input.config-ambari.json.j2
new file mode 100644
index 0000000..ad4adb2
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/input.config-ambari.json.j2
@@ -0,0 +1,602 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"ambari_agent",
+      "rowtype":"service",
+      "path":"{{ambari_agent_log_dir}}/ambari-agent.log"
+    },
+    {
+      "type":"ambari_server",
+      "rowtype":"service",
+      "path":"{{ambari_server_log_dir}}/ambari-server.log"
+    },
+    {
+      "type":"ambari_alerts",
+      "rowtype":"service",
+      "add_fields":{
+        "level":"UNKNOWN"
+      },
+      "path":"{{ambari_server_log_dir}}/ambari-alerts.log"
+    },
+    {
+      "type":"ambari_config_changes",
+      "rowtype":"service",
+      "path":"{{ambari_server_log_dir}}/ambari-config-changes.log"
+    },
+    {
+      "type":"ambari_eclipselink",
+      "rowtype":"service",
+      "path":"{{ambari_server_log_dir}}/ambari-eclipselink.log"
+    },
+    {
+      "type":"ambari_server_check_database",
+      "rowtype":"service",
+      "path":"{{ambari_server_log_dir}}/ambari-server-check-database.log"
+    },
+    {
+      "type":"ambari_audit",
+      "rowtype":"audit",
+      "add_fields":{
+        "logType":"AmbariAudit",
+        "enforcer":"ambari-acl",
+        "repoType":"1",
+        "repo":"ambari",
+        "level":"INFO"
+      },
+      "path":"{{ambari_server_log_dir}}/ambari-audit.log"
+    }
+
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ambari_agent"
+          ]
+
+        }
+
+      },
+      "log4j_format":"",
+      "multiline_pattern":"^(%{LOGLEVEL:level} %{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{LOGLEVEL:level} %{TIMESTAMP_ISO8601:logtime} %{JAVAFILE:file}:%{INT:line_number} - %{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+
+        },
+        "level":{
+          "map_fieldvalue":{
+            "pre_value":"WARNING",
+            "post_value":"WARN"
+          }
+
+        }
+
+      }
+
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ambari_server"
+          ]
+          
+        }
+        
+      },
+      "log4j_format":"%d{DATE} %5p [%t] %c{1}:%L - %m%n",
+      "multiline_pattern":"^(%{USER_SYNC_DATE:logtime})",
+      "message_pattern":"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"dd MMM yyyy HH:mm:ss"
+          }
+
+        }
+
+      }
+
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ambari_alerts"
+          ]
+          
+        }
+        
+      },
+      "log4j_format":"%d{DATE} %5p [%t] %c{1}:%L - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+
+        }
+
+      }
+
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ambari_config_changes"
+          ]
+          
+        }
+        
+      },
+      "log4j_format":"%d{DATE} %5p [%t] %c{1}:%L - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+
+        }
+
+      }
+
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ambari_eclipselink"
+          ]
+          
+        }
+        
+      },
+      "log4j_format":"%d{DATE} %5p [%t] %c{1}:%L - %m%n",
+      "multiline_pattern":"^(\\[EL%{SPACE}%{LOGLEVEL:level}\\])",
+      "message_pattern":"(?m)^\\[EL%{SPACE}%{LOGLEVEL:level}\\]:%{SPACE}%{TIMESTAMP_ISO8601:logtime}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss.SSS"
+          }
+
+        },
+        "level":{
+          "map_fieldvalue":{
+            "pre_value":"Warning",
+            "post_value":"Warn"
+          }
+
+        }
+
+      }
+
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ambari_server_check_database"
+          ]
+          
+        }
+        
+      },
+      "log4j_format":"%d{DATE} %5p [%t] %c{1}:%L - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+
+        }
+
+      }
+
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ambari_audit"
+          ]
+
+        }
+
+      },
+      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:evtTime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:evtTime},%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "evtTime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd'T'HH:mm:ss.SSSXX"
+          }
+
+        }
+
+      }
+
+    },
+    {
+      "filter":"keyvalue",
+      "sort_order":1,
+      "conditions":{
+        "fields":{
+          "type":[
+            "ambari_audit"
+          ]
+
+        }
+
+      },
+      "source_field":"log_message",
+      "field_split":", ",
+      "value_borders":"()",
+      "post_map_values":{
+        "User":{
+          "map_fieldvalue":{
+            "pre_value":"null",
+            "post_value":"unknown"
+          },
+          "map_fieldname":{
+            "new_fieldname":"reqUser"
+          }
+        },
+        "Hostname":{
+          "map_fieldname":{
+            "new_fieldname":"host"
+          }
+        },
+        "Host name":{
+          "map_fieldname":{
+            "new_fieldname":"host"
+          }
+        },
+        "RemoteIp":{
+          "map_fieldname":{
+            "new_fieldname":"cliIP"
+          }
+        },
+        "RequestType":{
+          "map_fieldname":{
+            "new_fieldname":"cliType"
+          }
+        },
+        "RequestId":{
+          "map_fieldname":{
+            "new_fieldname":"request_id"
+          }
+        },
+        "TaskId":{
+          "map_fieldname":{
+            "new_fieldname":"task_id"
+          }
+        },
+        "Operation":{
+          "map_fieldname":{
+            "new_fieldname":"action"
+          }
+        },
+        "url":{
+          "map_fieldname":{
+            "new_fieldname":"resource"
+          }
+        },
+        "ResourcePath":{
+          "map_fieldname":{
+            "new_fieldname":"resource"
+          }
+        },
+        "Cluster name":{
+          "map_fieldname":{
+            "new_fieldname":"cluster"
+          }
+        },
+        "Reason":{
+          "map_fieldname":{
+            "new_fieldname":"reason"
+          }
+        },
+        "Base URL":{
+          "map_fieldname":{
+            "new_fieldname":"ws_base_url"
+          }
+        },
+        "Command":{
+          "map_fieldvalue":{
+            "pre_value":"null",
+            "post_value":"unknown"
+          },
+          "map_fieldname":{
+            "new_fieldname":"ws_command"
+          }
+        },
+        "Component":{
+          "map_fieldname":{
+            "new_fieldname":"ws_component"
+          }
+        },
+        "Details":{
+          "map_fieldname":{
+            "new_fieldname":"ws_details"
+          }
+        },
+        "Display name":{
+          "map_fieldvalue":{
+            "pre_value":"null",
+            "post_value":"unknown"
+          },
+          "map_fieldname":{
+            "new_fieldname":"ws_display_name"
+          }
+        },
+        "OS":{
+          "map_fieldname":{
+            "new_fieldname":"ws_os"
+          }
+        },
+        "Repo id":{
+          "map_fieldname":{
+            "new_fieldname":"ws_repo_id"
+          }
+        },
+        "Repo version":{
+          "map_fieldvalue":{
+            "pre_value":"null",
+            "post_value":"unknown"
+          },
+          "map_fieldname":{
+            "new_fieldname":"ws_repo_version"
+          }
+        },
+        "Repositories":{
+          "map_fieldname":{
+            "new_fieldname":"ws_repositories"
+          }
+        },
+        "Roles":{
+          "map_fieldname":{
+            "new_fieldname":"ws_roles"
+          }
+        },
+        "Stack":{
+          "map_fieldname":{
+            "new_fieldname":"ws_stack"
+          }
+        },
+        "Stack version":{
+          "map_fieldname":{
+            "new_fieldname":"ws_stack_version"
+          }
+        },
+        "VersionNote":{
+          "map_fieldvalue":{
+            "pre_value":"null",
+            "post_value":"unknown"
+          },
+          "map_fieldname":{
+            "new_fieldname":"ws_version_note"
+          }
+        },
+        "VersionNumber":{
+          "map_fieldvalue":{
+            "pre_value":"Vnull",
+            "post_value":"unknown"
+          },
+          "map_fieldname":{
+            "new_fieldname":"ws_version_number"
+          }
+        },
+        "Status":[
+          {
+            "map_fieldcopy":{
+              "copy_name": "ws_status"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"Success",
+              "post_value":"1"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"Successfully queued",
+              "post_value":"1"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"QUEUED",
+              "post_value":"1"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"PENDING",
+              "post_value":"1"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"COMPLETED",
+              "post_value":"1"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"IN_PROGRESS",
+              "post_value":"1"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"Failed",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"Failed to queue",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"HOLDING",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"HOLDING_FAILED",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"HOLDING_TIMEDOUT",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"FAILED",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"TIMEDOUT",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"ABORTED",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"SKIPPED_FAILED",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldname":{
+              "new_fieldname":"result"
+            }
+          }
+        ],
+        "ResultStatus":[
+          {
+            "map_fieldcopy":{
+              "copy_name": "ws_result_status"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"200 OK",
+              "post_value":"1"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"201 Created",
+              "post_value":"1"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"202 Accepted",
+              "post_value":"1"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"400 Bad Request",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"401 Unauthorized",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"403 Forbidden",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"404 Not Found",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"409 Resource Conflict",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"500 Internal Server Error",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldname":{
+              "new_fieldname":"result"
+            }
+          }
+        ]
+
+      }
+
+    }
+
+  ]
+
+}
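(Note, for readers skimming the filter descriptors above: each grok filter parses lines selected by "conditions" using "multiline_pattern"/"message_pattern", then "post_map_values" post-processes the parsed fields -- "map_date" reformats a timestamp to "target_date_pattern", "map_fieldvalue" rewrites a value matching "pre_value", "map_fieldname" renames a field, and "map_fieldcopy" duplicates it. The following is only a minimal Python sketch of those post-mapping semantics applied to one parsed ambari_audit record; apply_post_map() and the sample record are invented for illustration and are not Logfeeder code.)

    # Illustration only -- apply_post_map() is a hypothetical helper, not part
    # of Logfeeder; it mirrors the post_map_values entries defined above.
    def apply_post_map(record, post_map_values):
        for field, mappers in post_map_values.items():
            if field not in record:
                continue
            for mapper in mappers if isinstance(mappers, list) else [mappers]:
                if 'map_fieldvalue' in mapper and record.get(field) == mapper['map_fieldvalue']['pre_value']:
                    record[field] = mapper['map_fieldvalue']['post_value']
                if 'map_fieldcopy' in mapper:
                    record[mapper['map_fieldcopy']['copy_name']] = record[field]
                if 'map_fieldname' in mapper:
                    record[mapper['map_fieldname']['new_fieldname']] = record.pop(field)
                    field = mapper['map_fieldname']['new_fieldname']
        return record

    post_map = {
        "User": {"map_fieldvalue": {"pre_value": "null", "post_value": "unknown"},
                 "map_fieldname": {"new_fieldname": "reqUser"}},
        "RemoteIp": {"map_fieldname": {"new_fieldname": "cliIP"}},
    }
    print(apply_post_map({"User": "null", "RemoteIp": "10.0.0.1"}, post_map))
    # {'reqUser': 'unknown', 'cliIP': '10.0.0.1'}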

http://git-wip-us.apache.org/repos/asf/ambari/blob/c295941b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/output.config.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/output.config.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/output.config.json.j2
new file mode 100644
index 0000000..062d636
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/output.config.json.j2
@@ -0,0 +1,61 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "output":[
+    {
+      "comment":"Output to solr for service logs",
+      "is_enabled":"{{solr_service_logs_enable}}",
+      "destination":"solr",
+      "zk_connect_string":"{{zookeeper_quorum}}{{infra_solr_znode}}",
+      "collection":"{{logsearch_solr_collection_service_logs}}",
+      "number_of_shards": "{{logsearch_collection_service_logs_numshards}}",
+      "splits_interval_mins": "{{logsearch_service_logs_split_interval_mins}}",
+      "conditions":{
+        "fields":{
+          "rowtype":[
+            "service"
+          ]
+
+        }
+
+      }
+
+    },
+    {
+      "comment":"Output to solr for audit records",
+      "is_enabled":"{{solr_audit_logs_enable}}",
+      "destination":"solr",
+      "zk_connect_string":"{{zookeeper_quorum}}{{infra_solr_znode}}",
+      "collection":"{{logsearch_solr_collection_audit_logs}}",
+      "number_of_shards": "{{logsearch_collection_audit_logs_numshards}}",
+      "splits_interval_mins": "{{logsearch_audit_logs_split_interval_mins}}",
+      "conditions":{
+        "fields":{
+          "rowtype":[
+            "audit"
+          ]
+
+        }
+
+      }
+
+    }
+
+  ]
+
+}
\ No newline at end of file
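(Note: Ambari renders these .j2 templates with real cluster configuration values -- in the test changes below they are fed through InlineTemplate. Purely as an illustration, the snippet below uses plain jinja2 as a stand-in to show how the placeholders in output.config.json.j2 resolve into a Logfeeder output config; all variable values here are invented.)

    # Illustration only: render the template with stand-in values. Ambari does
    # this through InlineTemplate with actual configs, not this script.
    import json
    from jinja2 import Template

    with open('output.config.json.j2') as f:
        rendered = Template(f.read()).render(
            solr_service_logs_enable='true',
            solr_audit_logs_enable='true',
            zookeeper_quorum='c6401.ambari.apache.org:2181',
            infra_solr_znode='/infra-solr',
            logsearch_solr_collection_service_logs='hadoop_logs',
            logsearch_collection_service_logs_numshards='2',
            logsearch_service_logs_split_interval_mins='15',
            logsearch_solr_collection_audit_logs='audit_logs',
            logsearch_collection_audit_logs_numshards='2',
            logsearch_audit_logs_split_interval_mins='15')

    config = json.loads(rendered)
    print(config['output'][0]['zk_connect_string'])  # c6401.ambari.apache.org:2181/infra-solr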

http://git-wip-us.apache.org/repos/asf/ambari/blob/c295941b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
index 773b75a..02570e2 100644
--- a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
+++ b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
@@ -62,7 +62,7 @@ class TestLogFeeder(RMFTestCase):
                               )
     self.assertResourceCalled('PropertiesFile', '/etc/ambari-logsearch-logfeeder/conf/logfeeder.properties',
                               properties={'logfeeder.checkpoint.folder': '/etc/ambari-logsearch-logfeeder/conf/checkpoints',
-                                          'logfeeder.config.files': 'global.config.json,output.config.json,input.config-ambari.json,input.config-logsearch.json,input.config-zookeeper.json',
+                                          'logfeeder.config.files': 'output.config.json,input.config-ambari.json,global.config.json,input.config-logsearch.json,input.config-zookeeper.json',
                                           'logfeeder.metrics.collector.hosts': '',
                                           'logfeeder.solr.core.config.name': 'history',
                                           'logfeeder.solr.zk_connect_string': 'c6401.ambari.apache.org:2181/infra-solr'
@@ -79,10 +79,18 @@ class TestLogFeeder(RMFTestCase):
                               content=InlineTemplate('GP'),
                               encoding='utf-8'
                               )
+    self.assertResourceCalled('File', '/etc/ambari-logsearch-logfeeder/conf/input.config-ambari.json',
+                              content=InlineTemplate('ambari-grok-filter'),
+                              encoding='utf-8'
+                              )
+    self.assertResourceCalled('File', '/etc/ambari-logsearch-logfeeder/conf/output.config.json',
+                              content=InlineTemplate('output-grok-filter'),
+                              encoding='utf-8'
+                              )
 
-    logfeeder_supported_services = ['ambari','logsearch']
+    logfeeder_supported_services = ['logsearch']
 
-    logfeeder_config_file_names = ['global.config.json', 'output.config.json'] + \
+    logfeeder_config_file_names = ['global.config.json'] + \
                                   ['input.config-%s.json' % (tag) for tag in logfeeder_supported_services]
 
     for file_name in logfeeder_config_file_names:
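(Note: input.config-ambari.json and output.config.json are now asserted individually above, since they are rendered from the new logfeeder-ambari-config and logfeeder-output-config templates added to the test fixture below, so the generic loop only covers the remaining files. For reference, after this change the list it iterates over evaluates to:)

    # Evaluates the same expressions as the updated test above.
    logfeeder_supported_services = ['logsearch']
    logfeeder_config_file_names = ['global.config.json'] + \
        ['input.config-%s.json' % tag for tag in logfeeder_supported_services]
    print(logfeeder_config_file_names)
    # ['global.config.json', 'input.config-logsearch.json']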

http://git-wip-us.apache.org/repos/asf/ambari/blob/c295941b/ambari-server/src/test/python/stacks/2.4/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.4/configs/default.json b/ambari-server/src/test/python/stacks/2.4/configs/default.json
index c3eba53..a6e2478 100644
--- a/ambari-server/src/test/python/stacks/2.4/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.4/configs/default.json
@@ -313,6 +313,12 @@
         "logfeeder_max_mem": "512m",
         "content": "# Licensed to the Apache Software Foundation (ASF) under one or more\n# contributor license agreements.  See the NOTICE file distributed with\n# this work for additional information regarding copyright ownership.\n# The ASF licenses this file to You under the Apache License, Version 2.0\n# (the \"License\"); you may not use this file except in compliance with\n# the License.  You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nlogsearch.solr.metrics.collector.hosts=http://{{metrics_collector_hosts}}:{{metrics_collector_port}}/ws/v1/timeline/metrics\n{% if logsearch_solr_ssl_enabled %}\nexport LOGFEED
 ER_SSL=\"true\"\nexport LOGFEEDER_KEYSTORE_LOCATION={{logfeeder_keystore_location}}\nexport LOGFEEDER_KEYSTORE_PASSWORD={{logfeeder_keystore_password}}\nexport LOGFEEDER_KEYSTORE_TYPE={{logfeeder_keystore_type}}\nexport LOGFEEDER_TRUSTSTORE_LOCATION={{logfeeder_truststore_location}}\nexport LOGFEEDER_TRUSTSTORE_PASSWORD={{logfeeder_truststore_password}}\nexport LOGFEEDER_TRUSTSTORE_TYPE={{logfeeder_truststore_type}}\n{% endif %}"
       },
+      "logfeeder-output-config" : {
+        "content" : "output-grok-filter"
+      },
+      "logfeeder-ambari-config" : {
+        "content" : "ambari-grok-filter"
+      },
       "logfeeder-grok": {
         "default_grok_patterns": "GP",
         "custom_grok_patterns": ""


[16/50] ambari git commit: AMBARI-18834 Add Ranger proxy user under Ranger KMS config during stack upgrade (mugdha)

Posted by sw...@apache.org.
AMBARI-18834 Add Ranger proxy user under Ranger KMS config during stack upgrade (mugdha)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d3c75557
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d3c75557
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d3c75557

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: d3c75557ec7bc20fa9b2782137a6412aa09a8afb
Parents: 68a881e
Author: Mugdha Varadkar <mu...@apache.org>
Authored: Wed Nov 23 10:55:03 2016 +0530
Committer: Mugdha Varadkar <mu...@apache.org>
Committed: Tue Nov 29 09:31:12 2016 +0530

----------------------------------------------------------------------
 .../upgrades/RangerKmsProxyConfig.java          |  95 +++++++++++++
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml |   7 +
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml |   7 +
 .../stacks/HDP/2.3/upgrades/upgrade-2.5.xml     |  11 ++
 .../stacks/HDP/2.3/upgrades/upgrade-2.6.xml     |  11 ++
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml |   7 +
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml |   7 +
 .../stacks/HDP/2.4/upgrades/upgrade-2.5.xml     |  11 ++
 .../stacks/HDP/2.4/upgrades/upgrade-2.6.xml     |  11 ++
 .../upgrades/RangerKmsProxyConfigTest.java      | 141 +++++++++++++++++++
 10 files changed, 308 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d3c75557/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfig.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfig.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfig.java
new file mode 100644
index 0000000..bb88f55
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfig.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.serveraction.upgrades;
+
+import java.text.MessageFormat;
+import java.util.Map;
+import java.util.concurrent.ConcurrentMap;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.actionmanager.HostRoleStatus;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.serveraction.AbstractServerAction;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.SecurityType;
+import org.apache.commons.lang.StringUtils;
+
+import com.google.inject.Inject;
+
+/**
+* Computes Ranger KMS Proxy properties in kms-site
+*/
+
+public class RangerKmsProxyConfig extends AbstractServerAction {
+  private static final String RANGER_ENV_CONFIG_TYPE = "ranger-env";
+  private static final String RANGER_KMS_SITE_CONFIG_TYPE = "kms-site";
+
+  @Inject
+  private Clusters m_clusters;
+
+  @Override
+  public CommandReport execute(ConcurrentMap<String, Object> requestSharedDataContext)
+    throws AmbariException, InterruptedException {
+
+    String clusterName = getExecutionCommand().getClusterName();
+    Cluster cluster = m_clusters.getCluster(clusterName);
+    String outputMsg = "";
+
+    Config rangerEnv = cluster.getDesiredConfigByType(RANGER_ENV_CONFIG_TYPE);
+
+    if (null == rangerEnv) {
+      return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
+        MessageFormat.format("Config source type {0} not found, skipping adding properties to {1}.", RANGER_ENV_CONFIG_TYPE, RANGER_KMS_SITE_CONFIG_TYPE), "");
+    }
+
+    String rangerUserProp = "ranger_user";
+    String rangerUser = rangerEnv.getProperties().get(rangerUserProp);
+
+    if (null == rangerUser) {
+      return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
+        MessageFormat.format("Required service user value from {0}/{1} not found, skipping adding properties to {2}.", RANGER_ENV_CONFIG_TYPE, rangerUserProp, RANGER_KMS_SITE_CONFIG_TYPE), "");
+    }
+
+    Config kmsSite = cluster.getDesiredConfigByType(RANGER_KMS_SITE_CONFIG_TYPE);
+
+    if (null == kmsSite) {
+      return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
+        MessageFormat.format("Config type {0} not found, skipping adding properties to it.", RANGER_KMS_SITE_CONFIG_TYPE), "");
+    }
+
+    Map<String, String> targetValues = kmsSite.getProperties();
+    if (cluster.getSecurityType() == SecurityType.KERBEROS) {
+      String userProp = "hadoop.kms.proxyuser." + rangerUser + ".users";
+      String groupProp = "hadoop.kms.proxyuser." + rangerUser + ".groups";
+      String hostProp = "hadoop.kms.proxyuser." + rangerUser + ".hosts";
+      targetValues.put(userProp, "*");
+      targetValues.put(groupProp, "*");
+      targetValues.put(hostProp, "*");
+      kmsSite.setProperties(targetValues);
+      kmsSite.persist(false);
+      outputMsg = outputMsg + MessageFormat.format("Successfully added properties to {0}", RANGER_KMS_SITE_CONFIG_TYPE);
+    } else {
+      outputMsg = outputMsg + MessageFormat.format("Kerberos not enabled, not setting proxy properties to {0}", RANGER_KMS_SITE_CONFIG_TYPE);
+    }
+
+    return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", outputMsg, "");
+
+  }
+}
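(Note: on a Kerberized cluster the action above boils down to adding three wildcard proxy-user properties to kms-site. A quick sketch of the resulting keys, assuming ranger-env/ranger_user is "ranger" -- the same assumption the new unit test below makes:)

    # Keys derived by RangerKmsProxyConfig for ranger_user = "ranger"; the
    # values are always "*". Sketch only -- the action reads ranger-env at runtime.
    ranger_user = 'ranger'
    kms_site_additions = {
        'hadoop.kms.proxyuser.%s.users' % ranger_user: '*',
        'hadoop.kms.proxyuser.%s.groups' % ranger_user: '*',
        'hadoop.kms.proxyuser.%s.hosts' % ranger_user: '*',
    }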

http://git-wip-us.apache.org/repos/asf/ambari/blob/d3c75557/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
index 04e4f3e..7a15e0e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
@@ -435,6 +435,13 @@
         <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_kms_audit_db"/>
       </execute-stage>
 
+      <execute-stage service="RANGER_KMS" component="RANGER_KMS_SERVER" title="Calculating Ranger Properties">
+        <condition xsi:type="security" type="kerberos"/>
+        <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.RangerKmsProxyConfig">
+          <summary>Adding Ranger proxy user properties</summary>
+        </task>
+      </execute-stage>
+
       <!-- KNOX -->
       <execute-stage service="KNOX" component="KNOX_GATEWAY" title="Apply config changes for Knox Gateway">
         <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_knox_audit_db"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d3c75557/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
index 3233f7e..fe2598b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
@@ -436,6 +436,13 @@
         <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_kms_audit_db"/>
       </execute-stage>
 
+      <execute-stage service="RANGER_KMS" component="RANGER_KMS_SERVER" title="Calculating Ranger Properties">
+        <condition xsi:type="security" type="kerberos"/>
+        <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.RangerKmsProxyConfig">
+          <summary>Adding Ranger proxy user properties</summary>
+        </task>
+      </execute-stage>
+
       <!-- KNOX -->
       <execute-stage service="KNOX" component="KNOX_GATEWAY" title="Apply config changes for Knox Gateway">
         <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_knox_audit_db"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d3c75557/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
index 95c5f06..bbf5299 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
@@ -130,6 +130,17 @@
       </service>
     </group>
 
+    <group xsi:type="cluster" name="UPDATE_RANGER_KMS_SITE" title="Update Ranger KMS Configuration">
+      <direction>UPGRADE</direction>
+      <skippable>true</skippable>
+      <execute-stage service="RANGER_KMS" component="RANGER_KMS_SERVER" title="Calculating Proxy Properties under kms-site">
+        <condition xsi:type="security" type="kerberos"/>
+        <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.RangerKmsProxyConfig">
+          <summary>Adding Ranger proxy user properties under kms-site</summary>
+        </task>
+      </execute-stage>
+    </group>
+
     <group name="RANGER_KMS" title="Ranger_KMS">
       <skippable>true</skippable>
       <supports-auto-skip-failure>false</supports-auto-skip-failure>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d3c75557/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
index a01996a..c5fdc99 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
@@ -131,6 +131,17 @@
       </service>
     </group>
 
+    <group xsi:type="cluster" name="UPDATE_RANGER_KMS_SITE" title="Update Ranger KMS Configuration">
+      <direction>UPGRADE</direction>
+      <skippable>true</skippable>
+      <execute-stage service="RANGER_KMS" component="RANGER_KMS_SERVER" title="Calculating Proxy Properties under kms-site">
+        <condition xsi:type="security" type="kerberos"/>
+        <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.RangerKmsProxyConfig">
+          <summary>Adding Ranger proxy user properties under kms-site</summary>
+        </task>
+      </execute-stage>
+    </group>
+
     <group name="RANGER_KMS" title="Ranger_KMS">
       <skippable>true</skippable>
       <supports-auto-skip-failure>false</supports-auto-skip-failure>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d3c75557/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
index 05e2be1..a46ac9e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
@@ -423,6 +423,13 @@
         <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_kms_audit_db"/>
       </execute-stage>
 
+      <execute-stage service="RANGER_KMS" component="RANGER_KMS_SERVER" title="Calculating Ranger Properties">
+        <condition xsi:type="security" type="kerberos"/>
+        <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.RangerKmsProxyConfig">
+          <summary>Adding Ranger proxy user properties</summary>
+        </task>
+      </execute-stage>
+
       <!-- SPARK -->
       <execute-stage service="SPARK" component="SPARK_JOBHISTORYSERVER" title="Apply config changes for Spark JobHistoryServer">
         <task xsi:type="configure" id="hdp_2_5_0_0_spark_jobhistoryserver"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d3c75557/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
index 1a26e59..6597cde 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
@@ -418,6 +418,13 @@
       <execute-stage service="RANGER_KMS" component="RANGER_KMS_SERVER" title="Apply config changes for Ranger KMS Server">
         <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_kms_audit_db"/>
       </execute-stage>
+
+      <execute-stage service="RANGER_KMS" component="RANGER_KMS_SERVER" title="Calculating Ranger Properties">
+        <condition xsi:type="security" type="kerberos"/>
+        <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.RangerKmsProxyConfig">
+          <summary>Adding Ranger proxy user properties</summary>
+        </task>
+      </execute-stage>
     </group>
 
     <!--

http://git-wip-us.apache.org/repos/asf/ambari/blob/d3c75557/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
index f0c6131..f5a5669 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
@@ -126,6 +126,17 @@
       </service>
     </group>
 
+    <group xsi:type="cluster" name="UPDATE_RANGER_KMS_SITE" title="Update Ranger KMS Configuration">
+      <direction>UPGRADE</direction>
+      <skippable>true</skippable>
+      <execute-stage service="RANGER_KMS" component="RANGER_KMS_SERVER" title="Calculating Proxy Properties under kms-site">
+        <condition xsi:type="security" type="kerberos"/>
+        <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.RangerKmsProxyConfig">
+          <summary>Adding Ranger proxy user properties under kms-site</summary>
+        </task>
+      </execute-stage>
+    </group>
+
     <group name="RANGER_KMS" title="Ranger_KMS">
       <skippable>true</skippable>
       <supports-auto-skip-failure>false</supports-auto-skip-failure>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d3c75557/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
index f520faf..406f38e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
@@ -131,6 +131,17 @@
       </service>
     </group>
 
+    <group xsi:type="cluster" name="UPDATE_RANGER_KMS_SITE" title="Update Ranger KMS Configuration">
+      <direction>UPGRADE</direction>
+      <skippable>true</skippable>
+      <execute-stage service="RANGER_KMS" component="RANGER_KMS_SERVER" title="Calculating Proxy Properties under kms-site">
+        <condition xsi:type="security" type="kerberos"/>
+        <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.RangerKmsProxyConfig">
+          <summary>Adding Ranger proxy user properties under kms-site</summary>
+        </task>
+      </execute-stage>
+    </group>
+
     <group name="RANGER_KMS" title="Ranger_KMS">
       <skippable>true</skippable>
       <supports-auto-skip-failure>false</supports-auto-skip-failure>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d3c75557/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfigTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfigTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfigTest.java
new file mode 100644
index 0000000..e000c65
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfigTest.java
@@ -0,0 +1,141 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.serveraction.upgrades;
+
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.lang.reflect.Field;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
+import org.apache.ambari.server.actionmanager.HostRoleCommand;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.agent.ExecutionCommand;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.SecurityType;
+import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigImpl;
+import org.easymock.EasyMock;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.inject.Injector;
+
+
+public class RangerKmsProxyConfigTest {
+  private Injector m_injector;
+  private Clusters m_clusters;
+  private Field m_clusterField;
+
+  @Before
+  public void setup() throws Exception {
+    m_injector = EasyMock.createMock(Injector.class);
+    m_clusters = EasyMock.createMock(Clusters.class);
+    Cluster cluster = EasyMock.createMock(Cluster.class);
+
+    Config rangerEnv = new ConfigImpl("ranger-env") {
+      Map<String, String> mockProperties = new HashMap<String, String>() {{
+        put("ranger_user", "ranger");
+      }};
+
+      @Override
+      public Map<String, String> getProperties() {
+        return mockProperties;
+      }
+    };
+
+    Config kmsSite = new ConfigImpl("kms-site") {
+      Map<String, String> mockProperties = new HashMap<String, String>();
+      @Override
+      public Map<String, String> getProperties() {
+        return mockProperties;
+      }
+
+      @Override
+      public void setProperties(Map<String, String> properties) {
+        mockProperties.putAll(properties);
+      }
+
+      @Override
+      public void persist(boolean newConfig) {
+        // no-op
+      }
+    };
+
+    expect(cluster.getDesiredConfigByType("ranger-env")).andReturn(rangerEnv).atLeastOnce();
+    expect(cluster.getDesiredConfigByType("kms-site")).andReturn(kmsSite).atLeastOnce();
+    expect(m_clusters.getCluster((String) anyObject())).andReturn(cluster).anyTimes();
+    expect(m_injector.getInstance(Clusters.class)).andReturn(m_clusters).atLeastOnce();
+    expect(cluster.getSecurityType()).andReturn(SecurityType.KERBEROS).anyTimes();
+
+    replay(m_injector, m_clusters, cluster);
+
+    m_clusterField = RangerKmsProxyConfig.class.getDeclaredField("m_clusters");
+    m_clusterField.setAccessible(true);
+  }
+
+  @Test
+  public void testAction() throws Exception {
+
+    Map<String, String> commandParams = new HashMap<String, String>();
+    commandParams.put("clusterName", "c1");
+
+    ExecutionCommand executionCommand = new ExecutionCommand();
+    executionCommand.setCommandParams(commandParams);
+    executionCommand.setClusterName("c1");
+
+    HostRoleCommand hrc = EasyMock.createMock(HostRoleCommand.class);
+    expect(hrc.getRequestId()).andReturn(1L).anyTimes();
+    expect(hrc.getStageId()).andReturn(2L).anyTimes();
+    expect(hrc.getExecutionCommandWrapper()).andReturn(new ExecutionCommandWrapper(executionCommand)).anyTimes();
+    replay(hrc);
+
+    RangerKmsProxyConfig action = new RangerKmsProxyConfig();
+    m_clusterField.set(action, m_clusters);
+
+    action.setExecutionCommand(executionCommand);
+    action.setHostRoleCommand(hrc);
+
+    CommandReport report = action.execute(null);
+    assertNotNull(report);
+
+    Cluster c = m_clusters.getCluster("c1");
+    Config config = c.getDesiredConfigByType("kms-site");
+    Map<String, String> map = config.getProperties();
+
+    assertTrue(map.containsKey("hadoop.kms.proxyuser.ranger.users"));
+    assertTrue(map.containsKey("hadoop.kms.proxyuser.ranger.groups"));
+    assertTrue(map.containsKey("hadoop.kms.proxyuser.ranger.hosts"));
+
+
+    assertEquals("*", map.get("hadoop.kms.proxyuser.ranger.users"));
+    assertEquals("*", map.get("hadoop.kms.proxyuser.ranger.groups"));
+    assertEquals("*", map.get("hadoop.kms.proxyuser.ranger.hosts"));
+
+    report = action.execute(null);
+    assertNotNull(report);
+
+  }
+}
\ No newline at end of file


[40/50] ambari git commit: AMBARI-12697 Rolling upgrade: Ambari UI should be able to display rolling upgrade history (dili via atkach)

Posted by sw...@apache.org.
AMBARI-12697 Rolling upgrade: Ambari UI should be able to display rolling upgrade history (dili via atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ef418377
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ef418377
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ef418377

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: ef41837712fee03772aca344b2edc337396974a0
Parents: 502cffb
Author: Andrii Tkach <at...@apache.org>
Authored: Wed Nov 30 20:32:16 2016 +0200
Committer: Andrii Tkach <at...@apache.org>
Committed: Wed Nov 30 20:32:16 2016 +0200

----------------------------------------------------------------------
 ambari-web/app/assets/test/tests.js             |   5 +
 ambari-web/app/controllers.js                   |   1 +
 .../admin/stack_upgrade_history_controller.js   | 217 +++++++++++
 ambari-web/app/mappers.js                       |   3 +-
 .../app/mappers/stack_upgrade_history_mapper.js |  54 +++
 ambari-web/app/messages.js                      |  28 ++
 ambari-web/app/models.js                        |   2 +
 .../app/models/finished_upgrade_entity.js       |  92 +++++
 .../stack_version/stack_upgrade_history.js      |  37 ++
 ambari-web/app/routes/main.js                   |   7 +
 ambari-web/app/styles/stack_versions.less       |  69 ++++
 .../admin/stack_upgrade/upgrade_history.hbs     | 105 ++++++
 .../stack_upgrade/upgrade_history_details.hbs   |  46 +++
 ambari-web/app/views.js                         |   2 +
 .../views/main/admin/stack_upgrade/menu_view.js |   6 +
 .../upgrade_history_details_view.js             |  85 +++++
 .../admin/stack_upgrade/upgrade_history_view.js | 303 +++++++++++++++
 .../stack_upgrade_history_controller_test.js    | 125 +++++++
 .../stack_upgrade_history_mapper_test.js        | 372 +++++++++++++++++++
 .../test/models/finished_upgrade_entity_test.js | 197 ++++++++++
 .../upgrade_history_details_view_test.js        | 248 +++++++++++++
 .../stack_upgrade/upgrade_history_view_test.js  | 173 +++++++++
 22 files changed, 2176 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/app/assets/test/tests.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/test/tests.js b/ambari-web/app/assets/test/tests.js
index 8571fc4..7440819 100644
--- a/ambari-web/app/assets/test/tests.js
+++ b/ambari-web/app/assets/test/tests.js
@@ -70,6 +70,7 @@ var files = [
   'test/controllers/main/admin/kerberos/step8_controller_test',
 
   'test/controllers/main/admin/stack_and_upgrade_controller_test',
+  'test/controllers/main/admin/stack_upgrade_history_controller_test',
   'test/controllers/main/admin/serviceAccounts_controller_test',
   'test/controllers/main/admin/highAvailability_controller_test',
   'test/controllers/main/admin/highAvailability/progress_controller_test',
@@ -145,6 +146,7 @@ var files = [
   'test/mappers/users_mapper_test',
   'test/mappers/stack_mapper_test',
   'test/mappers/stack_service_mapper_test',
+  'test/mappers/stack_upgrade_history_mapper_test',
   'test/mappers/repository_version_mapper_test',
   'test/mappers/configs/config_groups_mapper_test',
   'test/mappers/configs/service_config_version_mapper_test',
@@ -251,6 +253,8 @@ var files = [
   'test/views/main/admin/stack_upgrade/upgrade_group_view_test',
   'test/views/main/admin/stack_upgrade/upgrade_task_view_test',
   'test/views/main/admin/stack_upgrade/upgrade_wizard_view_test',
+  'test/views/main/admin/stack_upgrade/upgrade_history_view_test',
+  'test/views/main/admin/stack_upgrade/upgrade_history_details_view_test',
   'test/views/main/admin/stack_upgrade/version_view_test',
   'test/views/main/admin/stack_upgrade/services_view_test',
   'test/views/main/admin/stack_upgrade/menu_view_test',
@@ -384,6 +388,7 @@ var files = [
   'test/models/widget_property_test',
   'test/models/host_stack_version_test',
   'test/models/upgrade_entity_test',
+  'test/models/finished_upgrade_entity_test',
   'test/models/configs/sub_section_test',
   'test/models/configs/section_test',
   'test/models/configs/service_config_version_test',

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/app/controllers.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers.js b/ambari-web/app/controllers.js
index 5664029..81e5eb7 100644
--- a/ambari-web/app/controllers.js
+++ b/ambari-web/app/controllers.js
@@ -85,6 +85,7 @@ require('controllers/main/admin/highAvailability/journalNode/step6_controller');
 require('controllers/main/admin/highAvailability/journalNode/step7_controller');
 require('controllers/main/admin/highAvailability/journalNode/step8_controller');
 require('controllers/main/admin/stack_and_upgrade_controller');
+require('controllers/main/admin/stack_upgrade_history_controller');
 require('controllers/main/admin/serviceAccounts_controller');
 require('utils/polling');
 require('controllers/main/admin/kerberos');

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/app/controllers/main/admin/stack_upgrade_history_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/stack_upgrade_history_controller.js b/ambari-web/app/controllers/main/admin/stack_upgrade_history_controller.js
new file mode 100644
index 0000000..9bec825
--- /dev/null
+++ b/ambari-web/app/controllers/main/admin/stack_upgrade_history_controller.js
@@ -0,0 +1,217 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var App = require('app');
+var stringUtils = require('utils/string_utils');
+
+App.MainAdminStackUpgradeHistoryController = Em.ArrayController.extend({
+  name: 'mainAdminStackUpgradeHistoryController',
+
+  startIndex: 1,
+
+  resetStartIndex: false,
+
+  /**
+   * status of tasks/items/groups which should be grayed out and disabled
+   * @type {Array}
+   */
+  nonActiveStates: ['PENDING'],
+
+  /**
+   * mutable properties of Upgrade Task
+   * @type {Array}
+   */
+  taskDetailsProperties: ['status', 'stdout', 'stderr', 'error_log', 'host_name', 'output_log'],
+
+  /**
+   * Current upgrade record clicked on the UI
+   * @type {App.StackUpgradeHistory|null}
+   * */
+  currentUpgradeRecord: null,
+
+  isDowngrade: false,
+
+  upgradeData: null,
+
+  /**
+   * List of all <code>App.StackUpgradeHistory</code>. Latest one at the beginning
+   * @type {App.StackUpgradeHistory[]}
+   */
+  content: App.StackUpgradeHistory.find(),
+
+  upgradeHistoryUrl: function() {
+    return App.get('apiPrefix') + '/clusters/' + App.get('clusterName') + '/upgrades?fields=Upgrade';
+  }.property('App.clusterName'),
+
+  upgradeUpgradeRecordUrl: function() {
+    var record = this.get('currentUpgradeRecord');
+    return App.get('apiPrefix') + '/clusters/' + App.get('clusterName') + '/upgrades/' + record.get('requestId');
+  }.property('App.clusterName', 'currentUpgradeRecord'),
+
+  loadStackUpgradeHistoryToModel: function () {
+    console.log('Load stack upgrade history');
+    var dfd = $.Deferred();
+    App.HttpClient.get(this.get('upgradeHistoryUrl'), App.stackUpgradeHistoryMapper, {
+      complete: function () {
+        dfd.resolve();
+      }
+    });
+    return dfd.promise();
+  },
+
+  loadStackUpgradeRecord: function () {
+    var record = this.get('currentUpgradeRecord');
+    this.set('isDowngrade', ('DOWNGRADE' == record.get('direction')));
+    var dfd = $.Deferred();
+    var self = this;
+    if (record != null) {
+      App.ajax.send({
+        name: 'admin.upgrade.data',
+        sender: this,
+        data: {
+          id: record.get('requestId')
+        },
+        success: 'loadUpgradeRecordSuccessCallback'
+      }).then(dfd.resolve).complete(function () {
+      });
+    } else {
+      dfd.resolve();
+    }
+    return dfd.promise();
+  },
+
+  loadUpgradeRecordSuccessCallback: function(newData){
+    if (Em.isNone(newData)) {
+      var record = this.get('currentUpgradeRecord');
+      console.debug('No data returned for upgrade record ' + record.get('requestId'));
+      return;
+    }
+    var upgradeGroups = [];
+    if (newData.upgrade_groups) {
+      var nonActiveStates = this.get('nonActiveStates');
+      //wrap all entities into App.finishedUpgradeEntity
+      newData.upgrade_groups.forEach(function (newGroup) {
+      var hasExpandableItems = newGroup.upgrade_items.some(function (item) {
+            return !nonActiveStates.contains(item.UpgradeItem.status);
+          }),
+          oldGroup = App.finishedUpgradeEntity.create({type: 'GROUP', hasExpandableItems: hasExpandableItems}, newGroup.UpgradeGroup),
+          upgradeItems = [];
+        newGroup.upgrade_items.forEach(function (item) {
+          var oldItem = App.finishedUpgradeEntity.create({type: 'ITEM'}, item.UpgradeItem);
+          this.formatMessages(oldItem);
+          oldItem.set('tasks', []);
+          upgradeItems.pushObject(oldItem);
+        }, this);
+        upgradeItems.reverse();
+        oldGroup.set('upgradeItems', upgradeItems);
+        upgradeGroups.pushObject(oldGroup);
+      }, this);
+      upgradeGroups.reverse();
+      this.set('upgradeData', Em.Object.create({
+        upgradeGroups: upgradeGroups,
+        upgrade_groups: newData.upgrade_groups,
+        Upgrade: newData.Upgrade
+      }));
+    }
+  },
+
+  /**
+   * format upgrade item text
+   * @param {App.finishedUpgradeEntity} oldItem
+   */
+  formatMessages: function (oldItem) {
+    var text = oldItem.get('text');
+    var messages = [];
+
+    try {
+      var messageArray = JSON.parse(text);
+      for (var i = 0; i < messageArray.length; i++) {
+        messages.push(messageArray[i].message);
+      }
+      oldItem.set('text', messages.join(' '));
+    } catch (err) {
+      console.warn('Upgrade Item has malformed text');
+    }
+    oldItem.set('messages', messages);
+  },
+
+  /**
+   * request Upgrade Item and its tasks from server
+   * @param {Em.Object} item
+   * @param {Function} customCallback
+   * @return {$.ajax}
+   */
+  getUpgradeItem: function (item) {
+    return App.ajax.send({
+      name: 'admin.upgrade.upgrade_item',
+      sender: this,
+      data: {
+        upgradeId: item.get('request_id'),
+        groupId: item.get('group_id'),
+        stageId: item.get('stage_id')
+      },
+      success: 'getUpgradeItemSuccessCallback'
+    });
+  },
+
+  /**
+   * success callback of <code>getTasks</code>
+   * @param {object} data
+   */
+  getUpgradeItemSuccessCallback: function (data) {
+    this.get('upgradeData.upgradeGroups').forEach(function (group) {
+      if (group.get('group_id') === data.UpgradeItem.group_id) {
+        group.get('upgradeItems').forEach(function (item) {
+          if (item.get('stage_id') === data.UpgradeItem.stage_id) {
+            if (item.get('tasks.length')) {
+              data.tasks.forEach(function (task) {
+                var currentTask = item.get('tasks').findProperty('id', task.Tasks.id);
+                this.get('taskDetailsProperties').forEach(function (property) {
+                  currentTask.set(property, task.Tasks[property]);
+                }, this);
+              }, this);
+            } else {
+              var tasks = [];
+              data.tasks.forEach(function (task) {
+                tasks.pushObject(App.finishedUpgradeEntity.create({type: 'TASK'}, task.Tasks));
+              });
+              item.set('tasks', tasks);
+            }
+            item.set('isTasksLoaded', true);
+          }
+        }, this);
+      }
+    }, this);
+  },
+
+  /**
+   * status of Upgrade request
+   * @type {string}
+   */
+  requestStatus: function () {
+    if (this.get('upgradeData.Upgrade')) {
+      return this.get('upgradeData.Upgrade.request_status');
+    }
+    return '';
+  }.property('upgradeData.Upgrade.request_status'),
+});
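For reference: formatMessages() above assumes each upgrade item's "text" field carries a JSON-encoded array of {message: ...} objects, which is why it parses the text and joins the individual messages. A minimal sketch of the expected transformation (illustrative only, with made-up messages; "controller" stands for an instance of this history controller; not part of the patch):

    var item = App.finishedUpgradeEntity.create({type: 'ITEM'}, {
      text: JSON.stringify([
        {message: 'Restarting NAMENODE'},
        {message: 'Waiting for safe mode to be OFF'}
      ])
    });
    controller.formatMessages(item);
    // item.get('messages') -> ['Restarting NAMENODE', 'Waiting for safe mode to be OFF']
    // item.get('text')     -> 'Restarting NAMENODE Waiting for safe mode to be OFF'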

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/app/mappers.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers.js b/ambari-web/app/mappers.js
index 96193bc..cde9bcc 100644
--- a/ambari-web/app/mappers.js
+++ b/ambari-web/app/mappers.js
@@ -43,4 +43,5 @@ require('mappers/alert_groups_mapper');
 require('mappers/alert_notification_mapper');
 require('mappers/root_service_mapper');
 require('mappers/widget_mapper');
-require('mappers/widget_layout_mapper');
\ No newline at end of file
+require('mappers/widget_layout_mapper');
+require('mappers/stack_upgrade_history_mapper');
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/app/mappers/stack_upgrade_history_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/stack_upgrade_history_mapper.js b/ambari-web/app/mappers/stack_upgrade_history_mapper.js
new file mode 100644
index 0000000..63088a7
--- /dev/null
+++ b/ambari-web/app/mappers/stack_upgrade_history_mapper.js
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+var App = require('app');
+var stringUtils = require('utils/string_utils');
+
+App.stackUpgradeHistoryMapper = App.QuickDataMapper.create({
+  model: App.StackUpgradeHistory,
+
+  config: {
+    "id": "Upgrade.request_id",
+    "request_id": "Upgrade.request_id",
+    "cluster_name": "Upgrade.cluster_name",
+    "direction": "Upgrade.direction",
+    "from_version": "Upgrade.from_version",
+    "to_version": "Upgrade.to_version",
+    "end_time":"Upgrade.end_time",
+    "start_time":"Upgrade.start_time",
+    "create_time": "Upgrade.create_time",
+    "request_status": "Upgrade.request_status",
+    "upgrade_type": "Upgrade.upgrade_type",
+    "downgrade_allowed": "Upgrade.downgrade_allowed",
+    "skip_failures": "Upgrade.skip_failures",
+    "skip_service_check_failures": "Upgrade.skip_service_check_failures"
+  },
+
+  map: function (json) {
+    App.set('isStackUpgradeHistoryLoaded',false);
+    var model = this.get('model');
+    var result = [];
+    json.items.forEach(function(item) {
+      var parseResult = this.parseIt(item, this.get('config'));
+      result.push(parseResult);
+    }, this);
+
+    App.store.loadMany(this.get('model'), result);
+    App.store.commit();
+    App.set('isStackUpgradeHistoryLoaded',true);
+  },
+});
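The mapper relies on App.QuickDataMapper's declarative config: each key on the left becomes a property of the parsed record, filled from the JSON path on the right, with "id" taken from Upgrade.request_id. A minimal sketch of one parsed item (values borrowed from the mapper test further below; illustrative, not part of the patch):

    var item = {
      Upgrade: {
        request_id: 7, cluster_name: 'bi', direction: 'UPGRADE',
        from_version: '2.3.6.0-3712', to_version: '2.4.0.0-169',
        request_status: 'ABORTED', upgrade_type: 'NON_ROLLING',
        downgrade_allowed: true, skip_failures: false, skip_service_check_failures: false,
        create_time: 1463779169144, start_time: 1463779170159, end_time: 1463779266087
      }
    };
    var mapper = App.stackUpgradeHistoryMapper;
    var parsed = mapper.parseIt(item, mapper.get('config'));
    // parsed.id === 7, parsed.direction === 'UPGRADE', parsed.request_status === 'ABORTED', ...
    // map() builds one such record per json.items entry and loads them all
    // into App.StackUpgradeHistory via App.store.loadMany().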

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 7c3ccd3..5c7e0e8 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -130,6 +130,7 @@ Em.I18n.translations = {
   'common.open':'Open',
   'common.copy':'Copy',
   'common.complete':'Complete',
+  'common.completed':'Completed',
   'common.metrics':'Metrics',
   'common.timeRange':'Time Range',
   'common.name':'Name',
@@ -172,6 +173,7 @@ Em.I18n.translations = {
   'common.recommission':'Recommission',
   'common.failure': 'Failure',
   'common.type': 'Type',
+  'common.direction': 'Direction',
   'common.close': 'Close',
   'common.warning': 'Warning',
   'common.critical': 'Critical',
@@ -192,6 +194,7 @@ Em.I18n.translations = {
   'common.repositories':'Repositories',
   'common.stack.versions':'Stack Versions',
   'common.versions':'Versions',
+  'common.upgrade.history':'Upgrade History',
   'common.serviceAccounts': 'Service Accounts',
   'common.add': 'Add',
   'common.edit': 'Edit',
@@ -203,6 +206,7 @@ Em.I18n.translations = {
   'common.details':'Details',
   'common.stats':'Stats',
   'common.abort': 'Abort',
+  'common.aborted': 'Aborted',
   'common.misc': 'Misc',
   'common.userSettings': 'User Settings',
   'common.aboutAmbari': 'About',
@@ -334,6 +338,14 @@ Em.I18n.translations = {
   'common.logs': 'Logs',
   'common.warn.message': '<div class="alert alert-warn">{0}</div>',
   'common.link': 'Link',
+  'common.from.version': 'From Version',
+  'common.to.version': 'To Version',
+  'common.start.time': 'Start Time',
+  'common.end.time': 'End Time',
+  'common.rolling': 'Rolling',
+  'common.express': 'Express',
+  'common.rolling.downgrade': 'Rolling Downgrade',
+  'common.express.downgrade': 'Express Downgrade',
 
   'models.alert_instance.tiggered.verbose': "Occurred on {0} <br> Checked on {1}",
   'models.alert_definition.triggered.verbose': "Occurred on {0}",
@@ -1728,6 +1740,22 @@ Em.I18n.translations = {
   'admin.stackVersions.hosts.popup.primary': "Go to Hosts",
 
   'admin.stackVersions.details.install.hosts.popup.title': "Install {0} version",
+  'admin.stackVersions.upgradeHistory.upgrade': 'Upgrades',
+  'admin.stackVersions.upgradeHistory.downgrade': 'Downgrades',
+  'admin.stackVersions.upgradeHistory.show.details': 'Click to show more details on {0}',
+  'admin.stackVersions.upgradeHistory.success': 'Successful {0}',
+  'admin.stackVersions.upgradeHistory.aborted': 'Aborted {0}',
+  'admin.stackVersions.upgradeHistory.summary': 'Summary',
+  'admin.stackVersions.upgradeHistory.history': 'History',
+  'admin.stackVersions.upgradeHistory.filter.all': 'All ({0})',
+  'admin.stackVersions.upgradeHistory.filter.upgrade': 'Upgrade ({0})',
+  'admin.stackVersions.upgradeHistory.filter.downgrade': 'Downgrade ({0})',
+  'admin.stackVersions.upgradeHistory.filter.successful.upgrade': 'Successful Upgrade ({0})',
+  'admin.stackVersions.upgradeHistory.filter.successful.downgrade': 'Successful Downgrade ({0})',
+  'admin.stackVersions.upgradeHistory.filter.aborted.upgrade': 'Aborted Upgrade ({0})',
+  'admin.stackVersions.upgradeHistory.filter.aborted.downgrade': 'Aborted Downgrade ({0})',
+  'admin.stackVersions.upgradeHistory.no.history': 'No upgrade/downgrade history available',
+  'admin.stackVersions.upgradeHistory.record.title': '{0} {1} to {2}',
 
   'admin.stackUpgrade.preCheck.warning.message': "{0} Warning {1}",
   'admin.stackUpgrade.preCheck.bypass.message': "{0} Error {1}",

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/app/models.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models.js b/ambari-web/app/models.js
index b877255..e0168a2 100644
--- a/ambari-web/app/models.js
+++ b/ambari-web/app/models.js
@@ -30,6 +30,7 @@ require('models/stack_version/repository_version');
 require('models/stack_version/os');
 require('models/stack_version/service_simple');
 require('models/stack_version/repository');
+require('models/stack_version/stack_upgrade_history');
 require('models/operating_system');
 require('models/repository');
 require('models/stack_service');
@@ -64,6 +65,7 @@ require('models/master_component');
 require('models/host_stack_version');
 require('models/root_service');
 require('models/upgrade_entity');
+require('models/finished_upgrade_entity');
 require('models/configs/theme/theme_condition');
 require('models/configs/service_config_version');
 require('models/configs/stack_config_property');

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/app/models/finished_upgrade_entity.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/finished_upgrade_entity.js b/ambari-web/app/models/finished_upgrade_entity.js
new file mode 100644
index 0000000..c08ef45
--- /dev/null
+++ b/ambari-web/app/models/finished_upgrade_entity.js
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+/**
+ * @type {Ember.Object}
+ * @class
+ */
+App.finishedUpgradeEntity = Em.Object.extend({
+
+  /**
+   * type of entity "GROUP", "ITEM", "TASK"
+   * @type {string}
+   */
+  type: null,
+
+  /**
+   * @type {boolean}
+   */
+  isExpanded: false,
+
+  /**
+   * @type {boolean}
+   */
+  hasExpandableItems: false,
+
+  /**
+   * @type {boolean}
+   */
+  isVisible: Em.computed.notEqual('status', 'PENDING'),
+
+  /**
+   * status of tasks/items/groups which should be grayed out and disabled
+   * @type {Array}
+   */
+  nonActiveStates: ['PENDING'],
+
+  /**
+   * @type {boolean}
+   */
+  isRunning: Em.computed.existsIn('status', ['IN_PROGRESS']),
+
+  /**
+   * @type {number}
+   */
+  progress: function () {
+    return Math.floor(this.get('progress_percent'));
+  }.property('progress_percent'),
+
+  /**
+   * indicate whether entity has active link
+   * @type {boolean}
+   */
+  isActive: function () {
+    return !this.get('nonActiveStates').contains(this.get('status'));
+  }.property('status'),
+
+  /**
+   * indicate whether upgrade group should be expanded
+   * @type {boolean}
+   */
+  isExpandableGroup: function () {
+    return this.get('type') === 'GROUP' && (this.get('isActive') || this.get('hasExpandableItems'));
+  }.property('isActive', 'hasExpandableItems'),
+
+  upgradeItemStatus: Em.computed.firstNotBlank('display_status', 'status'),
+
+  /**
+   * @type {string}
+   */
+  upgradeGroupStatus: function () {
+    if (this.get('type') === 'GROUP' && !this.get('isActive') && this.get('hasExpandableItems')) {
+      return 'SUBITEM_FAILED';
+    }
+    return this.get('display_status') || this.get('status');
+  }.property('isExpandableGroup', 'display_status', 'status')
+});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/app/models/stack_version/stack_upgrade_history.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/stack_version/stack_upgrade_history.js b/ambari-web/app/models/stack_version/stack_upgrade_history.js
new file mode 100644
index 0000000..d4a89dc
--- /dev/null
+++ b/ambari-web/app/models/stack_version/stack_upgrade_history.js
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var App = require('app');
+
+App.StackUpgradeHistory = DS.Model.extend({
+  requestId: DS.attr('number'),
+  clusterName: DS.attr('string'),
+  direction: DS.attr('string'),
+  fromVersion: DS.attr('string'),
+  toVersion: DS.attr('string'),
+  requestStatus: DS.attr('string'),
+  upgradeType: DS.attr('string'),
+  downgradeAllowed: DS.attr('boolean'),
+  skipFailures: DS.attr('boolean'),
+  skipServiceCheckFailures: DS.attr('boolean'),
+  endTime: DS.attr('number'),
+  startTime: DS.attr('number'),
+  createTime: DS.attr('number'),
+});
+
+App.StackUpgradeHistory.FIXTURES = [];
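Once the mapper has loaded records, they are read back through the normal model API; the history view below, for example, iterates App.StackUpgradeHistory.find() to build its summary and filters. A small usage sketch (assuming Ember prototype extensions, as used elsewhere in ambari-web; not part of the patch):

    var history = App.StackUpgradeHistory.find().toArray();
    var completedUpgrades = history
      .filterProperty('direction', 'UPGRADE')
      .filterProperty('requestStatus', 'COMPLETED');
    console.log('completed upgrades: ' + completedUpgrades.length);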

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/app/routes/main.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/routes/main.js b/ambari-web/app/routes/main.js
index f0fe5d7..7e7b622 100644
--- a/ambari-web/app/routes/main.js
+++ b/ambari-web/app/routes/main.js
@@ -519,6 +519,13 @@ module.exports = Em.Route.extend(App.RouterRedirections, {
         }
       }),
 
+      upgradeHistory: Em.Route.extend({
+        route: '/history',
+        connectOutlets: function (router, context) {
+          router.get('mainAdminStackAndUpgradeController').connectOutlet('mainAdminStackUpgradeHistory');
+        },
+      }),
+
       stackNavigate: function (router, event) {
         var parent = event.view._parentView;
         parent.deactivateChildViews();

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/app/styles/stack_versions.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/stack_versions.less b/ambari-web/app/styles/stack_versions.less
index 921b418..715bbe3 100644
--- a/ambari-web/app/styles/stack_versions.less
+++ b/ambari-web/app/styles/stack_versions.less
@@ -420,6 +420,75 @@
   }
 }
 
+#stack-upgrade-record-dialog {
+  .details-box {
+    padding: 5px;
+    margin-left: 15px;
+    margin-right: 95px;
+    .button-row {
+      text-align: right;
+      padding: 5px;
+    }
+    input[type="checkbox"] {
+      margin: 0;
+    }
+    .message {
+      line-height: 30px;
+    }
+  }
+  .task-details {
+    .manage-controls a {
+      cursor: pointer;
+      margin-right: 12px;
+    }
+    textarea {
+      width: 100%;
+      min-height: 100px;
+      box-sizing: border-box;
+    }
+  }
+  .task-list {
+    overflow-x: hidden;
+    .progress {
+      margin-bottom: 0;
+    }
+    padding-left: 20px;
+    i {
+      margin-right: 5px;
+    }
+  }
+  .task-list-main-warp i {
+    font-size: 16px;
+  }
+  ul.failed-info-list {
+    max-height: 500px;
+    margin-top: 5px;
+  }
+  .upgrade-options-link {
+    position: absolute;
+    cursor: pointer;
+    right: 10%;
+    top: 13px;
+    width: 100px;
+    a {
+      font-size: 13px;
+    }
+    .icon-cogs {
+      color: #0088cc;
+      margin-right: 3px;
+    }
+  }
+  .upgrade-options-link.disabled {
+    cursor: not-allowed;
+    a, .icon-cogs {
+      color: #808080;
+    }
+    a:hover {
+      text-decoration: none;
+    }
+  }
+}
+
 .repository-list {
   .os-block {
     border-top: 1px solid #dddddd;

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_history.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_history.hbs b/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_history.hbs
new file mode 100644
index 0000000..6d4a32d
--- /dev/null
+++ b/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_history.hbs
@@ -0,0 +1,105 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div id="upgrade-history-section">
+  <div {{bindAttr class="view.isReady:hide:show :screensaver :no-borders "}}>
+    {{view App.SpinnerView}}
+  </div>
+  <div {{bindAttr class="view.isReady::hidden"}}>
+	  <div class="btn-group display-inline-block">
+	    <a class="btn dropdown-toggle" data-toggle="dropdown" href="#">
+	      <span class="filters-label">{{t common.filter}}: </span>
+	        <span>
+	          {{view.selectedCategory.label}}
+	          <span class="caret"></span>
+	        </span>
+	    </a>
+	    <ul class="dropdown-menu">
+	      {{#each category in view.categories}}
+	        <li>
+	          <a {{action selectCategory category target="view"}} href="#">
+	            {{category.label}}
+	          </a>
+	        </li>
+	      {{/each}}
+	    </ul>
+	  </div>
+	  <br/>
+	  <br/>
+	  <table class="table advanced-header-table table-striped" id="upgrade-summary-table">
+	    <thead><tr>
+	      <th>{{t common.direction}}</th>
+	      <th>{{t common.type}}</th>
+	      <th>{{t common.from.version}}</th>
+	      <th>{{t common.to.version}}</th>
+	      <th>{{t common.start.time}}</th>
+	      <th>{{t common.duration}}</th>
+	      <th>{{t common.end.time}}</th>
+	      <th>{{t common.status}}</th>
+	    </tr></thead>
+	    <tbody>
+		    {{#if view.pageContent}}
+		      {{#each item in view.pageContent}}
+		        <tr>
+		          <td class='name'>
+		            <span class="trim_hostname">
+		              <a href="#" class="black" {{action "showUpgradeHistoryRecord" item target="view"}}>
+		                {{unbound item.directionLabel}}
+		              </a>
+		            </span>
+		          </td>
+		          <td>
+		            <span>{{item.upgradeTypeLabel}}</span>
+		          </td>
+		          <td>
+		            <span>{{item.fromVersion}}</span>
+		          </td>
+		          <td>
+		            <span>{{item.toVersion}}</span>
+		          </td>
+		          <td>
+		            <span>{{item.startTimeLabel}}</span>
+		          </td>
+		          <td>
+	              <span>{{item.duration}}</span>
+	            </td>
+		          <td>
+		            <span>{{item.endTimeLabel}}</span>
+		          </td>
+		          <td>
+		            <span>{{item.requestStatus}}</span>
+		          </td>
+		        </tr>
+		      {{/each}}
+		    {{/if}}
+	    </tbody>
+	  </table>
+	  <div class="page-bar">
+	    <div class="items-on-page">
+	      <label>{{t common.show}}: {{view view.rowsPerPageSelectView selectionBinding="view.displayLength"}}</label>
+	    </div>
+	    <div class="info">{{view.paginationInfo}}</div>
+	    <div class="paging_two_button">
+	      <a {{bindAttr class="view.paginationLeftClass"}}{{action previousPage target="view"}}><i
+	              class="icon-arrow-left"></i></a>
+	      <a {{bindAttr class="view.paginationRightClass"}}{{action nextPage target="view"}}><i
+	              class="icon-arrow-right"></i></a>
+	    </div>
+	  </div>
+  </div>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_history_details.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_history_details.hbs b/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_history_details.hbs
new file mode 100644
index 0000000..2d431f9
--- /dev/null
+++ b/ambari-web/app/templates/main/admin/stack_upgrade/upgrade_history_details.hbs
@@ -0,0 +1,46 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div id="stack-upgrade-record-dialog">
+  <div {{bindAttr class="view.isLoaded::hidden :row-fluid"}}>
+    <div class="span3 task-list-main-warp">{{statusIcon controller.requestStatus}}
+      &nbsp;{{view.upgradeStatusLabel}}</div>
+    <div class="span8">
+      {{view App.ProgressBarView
+        progressBinding="view.overallProgress"
+        statusBinding="controller.requestStatus"
+      }}
+    </div>
+    <div class="span1">
+      {{view.overallProgress}}%
+    </div>
+  </div>
+
+  <div class="task-list scrollable-block task-list-main-warp">
+    {{#if view.isReady}}
+      {{#each group in controller.upgradeData.upgradeGroups}}
+        {{#if group.isVisible}}
+          {{view App.upgradeGroupView contentBinding="group"}}
+        {{/if}}
+      {{/each}}
+    {{/if}}
+  </div>
+  {{#unless view.isReady}}
+    {{view App.SpinnerView}}
+  {{/unless}}
+</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/app/views.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views.js b/ambari-web/app/views.js
index 0d9dc3f..0dac227 100644
--- a/ambari-web/app/views.js
+++ b/ambari-web/app/views.js
@@ -213,6 +213,8 @@ require('views/main/admin/stack_upgrade/upgrade_version_box_view');
 require('views/main/admin/stack_upgrade/upgrade_version_column_view');
 require('views/main/admin/stack_upgrade/upgrade_group_view');
 require('views/main/admin/stack_upgrade/upgrade_task_view');
+require('views/main/admin/stack_upgrade/upgrade_history_view');
+require('views/main/admin/stack_upgrade/upgrade_history_details_view');
 require('views/main/admin/stack_upgrade/services_view');
 require('views/main/admin/stack_upgrade/versions_view');
 require('views/main/admin/stack_upgrade/menu_view');

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/app/views/main/admin/stack_upgrade/menu_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/stack_upgrade/menu_view.js b/ambari-web/app/views/main/admin/stack_upgrade/menu_view.js
index 1e84f1c..db5f946 100644
--- a/ambari-web/app/views/main/admin/stack_upgrade/menu_view.js
+++ b/ambari-web/app/views/main/admin/stack_upgrade/menu_view.js
@@ -35,6 +35,12 @@ App.MainAdminStackMenuView = Em.CollectionView.extend({
         label: Em.I18n.t('common.versions'),
         routing: 'versions',
         hidden: !App.get('stackVersionsAvailable')
+      }),
+      Em.Object.create({
+        name: 'upgradeHistory',
+        label: Em.I18n.t('common.upgrade.history'),
+        routing: 'upgradeHistory',
+        hidden: !App.get('stackVersionsAvailable')
       })
     ]
   }.property('App.stackVersionsAvailable'),

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/app/views/main/admin/stack_upgrade/upgrade_history_details_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_history_details_view.js b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_history_details_view.js
new file mode 100644
index 0000000..983c26a
--- /dev/null
+++ b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_history_details_view.js
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+var App = require('app');
+var date = require('utils/date/date');
+
+App.MainAdminStackUpgradeHistoryDetailsView = Em.View.extend({
+  controllerBinding: 'App.router.mainAdminStackUpgradeHistoryController',
+  templateName: require('templates/main/admin/stack_upgrade/upgrade_history_details'),
+  isReady: false,
+
+  willInsertElement: function(){
+    var self = this;
+    this.get('controller').loadStackUpgradeRecord().done(function(){
+      self.populateUpgradeHistoryRecord();
+    });
+  },
+
+  willDestroyElement: function () {
+    this.set('isReady', false);
+  },
+
+  populateUpgradeHistoryRecord: function(){
+    var upgradeData = this.get('controller.upgradeData');
+    this.set('isReady', upgradeData != null);
+  },
+
+  /**
+   * progress value is rounded to floor
+   * @type {number}
+   */
+  overallProgress: function () {
+    return Math.floor(this.get('controller.upgradeData.Upgrade.progress_percent'));
+  }.property('controller.upgradeData.Upgrade.progress_percent'),
+
+  /**
+   * label of Upgrade status
+   * @type {string}
+   */
+  upgradeStatusLabel: function() {
+    var labelKey = null;
+    switch (this.get('controller.upgradeData.Upgrade.request_status')) {
+      case 'QUEUED':
+      case 'PENDING':
+      case 'IN_PROGRESS':
+        labelKey = 'admin.stackUpgrade.state.inProgress';
+        break;
+      case 'COMPLETED':
+        labelKey = 'admin.stackUpgrade.state.completed';
+        break;
+      case 'ABORTED':
+        labelKey = 'admin.stackUpgrade.state.paused';
+        break;
+      case 'TIMEDOUT':
+      case 'FAILED':
+      case 'HOLDING_FAILED':
+      case 'HOLDING_TIMEDOUT':
+      case 'HOLDING':
+        labelKey = 'admin.stackUpgrade.state.paused';
+        break;
+    }
+    if (labelKey) {
+      labelKey += (this.get('controller.isDowngrade')) ? '.downgrade' : "";
+      return Em.I18n.t(labelKey);
+    } else {
+      return "";
+    }
+  }.property('controller.upgradeData.Upgrade.request_status', 'controller.isDowngrade'),
+});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/app/views/main/admin/stack_upgrade/upgrade_history_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_history_view.js b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_history_view.js
new file mode 100644
index 0000000..ef5f46b
--- /dev/null
+++ b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_history_view.js
@@ -0,0 +1,303 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+var App = require('app');
+var date = require('utils/date/date');
+
+App.MainAdminStackUpgradeHistoryView = App.TableView.extend(App.TableServerViewMixin, {
+
+  controllerBinding: 'App.router.mainAdminStackUpgradeHistoryController',
+
+  templateName: require('templates/main/admin/stack_upgrade/upgrade_history'),
+
+  summary: [],
+
+  isReady: false,
+
+  categories: [
+    Em.Object.create({
+      labelKey: 'admin.stackVersions.upgradeHistory.filter.all',
+      value: 'ALL',
+      isSelected: true
+    }),
+    Em.Object.create({
+      labelKey: 'admin.stackVersions.upgradeHistory.filter.upgrade',
+      value: 'UPGRADE',
+      isSelected: false
+    }),
+    Em.Object.create({
+      labelKey: 'admin.stackVersions.upgradeHistory.filter.downgrade',
+      value: 'DOWNGRADE',
+      isSelected: false
+    }),
+    Em.Object.create({
+      labelKey: 'admin.stackVersions.upgradeHistory.filter.successful.upgrade',
+      value: 'UPGRADE_COMPLETED',
+      isSelected: false
+    }),
+    Em.Object.create({
+      labelKey: 'admin.stackVersions.upgradeHistory.filter.aborted.upgrade',
+      value: 'UPGRADE_ABORTED',
+      isSelected: false
+    }),
+    Em.Object.create({
+      labelKey: 'admin.stackVersions.upgradeHistory.filter.successful.downgrade',
+      value: 'DOWNGRADE_COMPLETED',
+      isSelected: false
+    }),
+    Em.Object.create({
+      labelKey: 'admin.stackVersions.upgradeHistory.filter.aborted.downgrade',
+      value: 'DOWNGRADE_ABORTED',
+      isSelected: false
+    }),
+  ],
+
+  /**
+   * @type {object}
+   */
+  selectedCategory: Em.computed.findBy('categories', 'isSelected', true),
+
+  filteredCount: function(){
+    var filteredContent = this.get('filteredContent').toArray();
+    return filteredContent.length;
+  }.property('filteredContent'),
+
+  /**
+   * content filtered by upgrade direction and request status
+   */
+  filteredContent: function () {
+    var filterValue = 'ALL';
+    var category = this.get('selectedCategory');
+    if (category) {
+      filterValue = category.get('value');
+    }
+    return this.filterBy(filterValue).reverse();
+  }.property('selectedCategory'),
+
+  /**
+   * slice received content by pagination parameters
+   */
+  pageContent: function () {
+    var content = this.get('filteredContent').toArray();
+    content = this.processForDisplay(content);
+    content = content.slice(this.get('startIndex') - 1, this.get('endIndex'));
+    return content;
+  }.property('filteredContent', 'startIndex', 'endIndex'),
+
+  processForDisplay: function(content){
+    var processedContent = [];
+    content.forEach(function(item){
+      if('UPGRADE' == item.get('direction'))
+        item.set('directionLabel', Em.I18n.t('common.upgrade'));
+      else
+        item.set('directionLabel', Em.I18n.t('common.downgrade'));
+
+      if('NON_ROLLING' == item.get('upgradeType'))
+        item.set('upgradeTypeLabel', Em.I18n.t('common.express'));
+      else
+        item.set('upgradeTypeLabel', Em.I18n.t('common.rolling'));
+
+      item.set('startTimeLabel', date.startTime(item.get('startTime')));
+      item.set('endTimeLabel', date.startTime(item.get('endTime')));
+      item.set('duration', date.durationSummary(item.get('startTime'), item.get('endTime')));
+      processedContent.push(item);
+    },this);
+    return processedContent;
+  },
+
+  paginationLeftClass: function () {
+    if (this.get("startIndex") > 1) {
+      return "paginate_previous";
+    }
+    return "paginate_disabled_previous";
+  }.property("startIndex", 'filteredCount'),
+
+  /**
+   * Determines how display "next"-link - as link or text
+   * @type {string}
+   */
+  paginationRightClass: function () {
+    if (this.get("endIndex") < this.get("filteredCount")) {
+      return "paginate_next";
+    }
+    return "paginate_disabled_next";
+  }.property("endIndex", 'filteredCount'),
+
+  /**
+   * Show previous-page if user not in the first page
+   * @method previousPage
+   */
+  previousPage: function () {
+    if (this.get('paginationLeftClass') === 'paginate_previous') {
+      this._super();
+    }
+  },
+
+  /**
+   * Show next-page if user not in the last page
+   * @method nextPage
+   */
+  nextPage: function () {
+    if (this.get('paginationRightClass') === 'paginate_next') {
+      this._super();
+    }
+  },
+
+  willInsertElement: function(){
+    var self = this;
+    this.get('controller').loadStackUpgradeHistoryToModel().done(function(){
+      self.populateUpgradeHistorySummary();
+    });
+  },
+
+  didInsertElement: function () {
+    this.observesCategories();
+  },
+
+  observesCategories: function(){
+    this.get('categories').forEach(function (category) {
+      var label = Em.I18n.t(category.labelKey).format(this.filterBy(category.value).length);
+      category.set('label', label)
+    }, this);
+  }.observes('isReady'),
+
+  filterBy: function(filterValue){
+    if ('ALL' == filterValue) {
+      var all_records = App.StackUpgradeHistory.find();
+      return all_records.toArray();
+    } else {
+      var tokens = filterValue.split('_');
+      var direction_token = null;
+      var status_token = null;
+
+      if (tokens.length == 1) {
+        direction_token = tokens[0];
+      } else if (tokens.length > 1) {
+        direction_token = tokens[0];
+        status_token = tokens[1];
+      }
+
+      var result = [];
+      App.StackUpgradeHistory.find().forEach(function(item){
+        var direction = item.get('direction');
+        if (direction == direction_token) {
+          if (status_token != null) {
+            //only include records with the matching status
+            var status = item.get('requestStatus');
+            if (status == status_token) {
+              result.push(item);
+            }
+          } else {
+            //include records regardless of status
+            result.push(item);
+          }
+        }
+      }, this);
+      return result;
+    }
+  },
+
+  selectCategory: function(event){
+    this.get('categories').filterProperty('isSelected').setEach('isSelected', false);
+    event.context.set('isSelected', true);
+  },
+
+  populateUpgradeHistorySummary: function(){
+    this.set('isReady', false);
+    var result = [
+      Em.Object.create({
+        direction: 'UPGRADE',
+        label:Em.I18n.t('common.upgrade'),
+        hasSuccess: false,
+        success:0,
+        hasAbort: false,
+        abort:0,
+      }),
+      Em.Object.create({
+        direction: 'DOWNGRADE',
+        label:Em.I18n.t('common.downgrade'),
+        hasSuccess: false,
+        success:0,
+        hasAbort: false,
+        abort:0,
+      })
+    ];
+
+    App.StackUpgradeHistory.find().forEach(function(item){
+      var direction = item.get('direction');
+      var status = item.get('requestStatus');
+      if('UPGRADE' == direction){
+        if('COMPLETED' == status){
+          result[0].set('success', result[0].get('success') + 1);
+        } else if ('ABORTED' == status) {
+          result[0].set('abort', result[0].get('abort') + 1);
+        }
+      } else if('DOWNGRADE' == direction){
+        if('COMPLETED' == status){
+          result[1].set('success', result[1].get('success')+1);
+        } else if ('ABORTED' == status){
+          result[1].set('abort', result[1].get('abort')+1);
+        }
+      }
+    }, this);
+
+    result[0].set('hasSuccess', result[0].get('success') > 0);
+    result[1].set('hasSuccess', result[1].get('success') > 0);
+    result[0].set('hasAbort', result[0].get('abort') > 0);
+    result[1].set('hasAbort', result[1].get('abort') > 0);
+
+    this.set('summary', result);
+    this.set('isReady', true);
+  },
+
+  showUpgradeHistoryRecord: function(event) {
+    var record = event.context;
+    var direction = App.format.normalizeName(record.get('direction'));
+    var type = record.get('upgradeType');
+    if ('ROLLING' == type) {
+      type = App.format.normalizeName(type);
+    } else if ('NON_ROLLING' == type) {
+      type = 'Express';
+    }
+
+    var title = Em.I18n.t('admin.stackVersions.upgradeHistory.record.title').format(type, direction, record.get('fromVersion'));
+
+    this.get('controller').set('currentUpgradeRecord', record);
+
+    App.ModalPopup.show({
+      classNames: ['full-width-modal'],
+      header: title,
+      bodyClass: App.MainAdminStackUpgradeHistoryDetailsView,
+      primary: Em.I18n.t('common.dismiss'),
+      secondary: null,
+      didInsertElement: function () {
+        this._super();
+        this.fitHeight();
+        this.fitInnerHeight();
+      },
+      fitInnerHeight: function () {
+        var block = this.$().find('#modal > .modal-body');
+        var scrollable = this.$().find('#modal .scrollable-block');
+        scrollable.css('max-height', Number(block.css('max-height').slice(0, -2)) - block.height());
+        block.css('max-height', 'none');
+      },
+    });
+  },
+});
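Note on the filter categories above: a category value is either a bare direction ('UPGRADE', 'DOWNGRADE') or a direction and request status joined by an underscore ('UPGRADE_COMPLETED', 'DOWNGRADE_ABORTED'), and filterBy() splits on '_' to apply it. Expected behaviour as an illustrative sketch ("view" stands for an App.MainAdminStackUpgradeHistoryView instance; not part of the patch):

    view.filterBy('ALL');                 // every App.StackUpgradeHistory record
    view.filterBy('UPGRADE');             // direction == 'UPGRADE', any request status
    view.filterBy('DOWNGRADE_COMPLETED'); // direction == 'DOWNGRADE' and requestStatus == 'COMPLETED'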

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/test/controllers/main/admin/stack_upgrade_history_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/admin/stack_upgrade_history_controller_test.js b/ambari-web/test/controllers/main/admin/stack_upgrade_history_controller_test.js
new file mode 100644
index 0000000..bbfce19
--- /dev/null
+++ b/ambari-web/test/controllers/main/admin/stack_upgrade_history_controller_test.js
@@ -0,0 +1,125 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+var App = require('app');
+require('controllers/main/admin/stack_upgrade_history_controller');
+require('utils/string_utils');
+var testHelpers = require('test/helpers');
+describe('App.MainAdminStackUpgradeHistoryController', function() {
+
+  var controller = App.MainAdminStackUpgradeHistoryController.create({
+
+  });
+
+  describe("#upgradeHistoryUrl", function() {
+    before(function () {
+      this.mock = sinon.stub(App, 'get');
+      this.mock.withArgs('apiPrefix').returns('apiPrefix')
+        .withArgs('clusterName').returns('clusterName');
+    });
+    after(function () {
+      this.mock.restore();
+    });
+    it("should be valid", function() {
+      controller.propertyDidChange('upgradeHistoryUrl');
+      expect(controller.get('upgradeHistoryUrl')).to.equal('apiPrefix/clusters/clusterName/upgrades?fields=Upgrade');
+    });
+  });
+
+  describe("#requestStatus", function() {
+    beforeEach(function() {
+      this.mock = sinon.stub(App, 'get');
+    });
+    afterEach(function() {
+      this.mock.restore();
+    });
+    it("state should be what the record states", function() {
+      this.mock.returns(false);
+      controller.set('upgradeData', { Upgrade: {request_status: 'COMPLETED'}});
+      controller.propertyDidChange('requestStatus');
+      expect(controller.get('requestStatus')).to.equal('COMPLETED');
+    });
+
+    it("upgradeData is null", function() {
+      this.mock.returns(false);
+      controller.set('upgradeData', null);
+      controller.propertyDidChange('requestStatus');
+      expect(controller.get('requestStatus')).to.be.empty;
+    });
+  });
+
+  describe("#loadStackUpgradeRecord()", function() {
+    it("get upgrade record data", function() {
+      controller.set('currentUpgradeRecord', Em.Object.create({'requestId':1, 'direction':'DOWNGRADE'}));
+      controller.loadStackUpgradeRecord();
+      var args = testHelpers.findAjaxRequest('name', 'admin.upgrade.data');
+      expect(args[0]).to.exist;
+      expect(args[0].sender).to.be.eql(controller);
+      expect(args[0].data).to.be.eql({
+        id: 1
+      });
+    });
+  });
+
+  describe("#loadUpgradeRecordSuccessCallback()", function() {
+    it("correct data", function() {
+      var data = {
+        "Upgrade": {
+          "request_status": "COMPLETED"
+        },
+        "upgrade_groups": [
+          {
+            "UpgradeGroup": {
+              "id": 1
+            },
+            "upgrade_items": []
+          }
+        ]};
+      controller.loadUpgradeRecordSuccessCallback(data);
+      expect(controller.get('upgradeData') == null).to.be.false;
+    });
+
+    it("data is null", function() {
+      var data = null;
+      controller.set('upgradeData', null)
+      controller.loadUpgradeRecordSuccessCallback(data);
+      expect(controller.get('upgradeData') == null).to.be.true;
+    });
+  });
+
+  describe("#getUpgradeItem()", function() {
+    it("default callback", function() {
+      var item = Em.Object.create({
+        request_id: 1,
+        group_id: 2,
+        stage_id: 3
+      });
+      controller.getUpgradeItem(item);
+      var args = testHelpers.findAjaxRequest('name', 'admin.upgrade.upgrade_item');
+      expect(args[0]).to.exist;
+      expect(args[0].sender).to.be.eql(controller);
+      expect(args[0].success).to.be.equal('getUpgradeItemSuccessCallback');
+      expect(args[0].data).to.be.eql({
+        upgradeId: 1,
+        groupId: 2,
+        stageId: 3
+      });
+    });
+  });
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/test/mappers/stack_upgrade_history_mapper_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/mappers/stack_upgrade_history_mapper_test.js b/ambari-web/test/mappers/stack_upgrade_history_mapper_test.js
new file mode 100644
index 0000000..07027e1
--- /dev/null
+++ b/ambari-web/test/mappers/stack_upgrade_history_mapper_test.js
@@ -0,0 +1,372 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+/*eslint-disable */
+
+var App = require('app');
+
+require('mappers/stack_upgrade_history_mapper');
+
+describe('App.stackUpgradeHistoryMapper', function () {
+
+  describe('#map', function () {
+
+    var data = {
+        "href" : "http://bdavm079.svl.ibm.com:8080/api/v1/clusters/bi/upgrades?fields=Upgrade",
+        "items" : [
+          {
+            "href" : "http://bdavm079.svl.ibm.com:8080/api/v1/clusters/bi/upgrades/7",
+            "Upgrade" : {
+              "cluster_name" : "bi",
+              "create_time" : 1463779169144,
+              "direction" : "UPGRADE",
+              "downgrade_allowed" : true,
+              "end_time" : 1463779266087,
+              "exclusive" : false,
+              "from_version" : "2.3.6.0-3712",
+              "pack" : "nonrolling-upgrade-2.4",
+              "progress_percent" : 100.0,
+              "request_context" : "Upgrading to 2.4.0.0-169",
+              "request_id" : 7,
+              "request_status" : "ABORTED",
+              "skip_failures" : false,
+              "skip_service_check_failures" : false,
+              "start_time" : 1463779170159,
+              "suspended" : false,
+              "to_version" : "2.4.0.0-169",
+              "type" : "INTERNAL_REQUEST",
+              "upgrade_type" : "NON_ROLLING"
+            }
+          },
+          {
+            "href" : "http://bdavm079.svl.ibm.com:8080/api/v1/clusters/bi/upgrades/8",
+            "Upgrade" : {
+              "cluster_name" : "bi",
+              "create_time" : 1463779266212,
+              "direction" : "DOWNGRADE",
+              "downgrade_allowed" : true,
+              "end_time" : 1463779299440,
+              "exclusive" : false,
+              "from_version" : "2.3.6.0-3712",
+              "pack" : "nonrolling-upgrade-2.4",
+              "progress_percent" : 100.0,
+              "request_context" : "Downgrading to 2.3.6.0-3712",
+              "request_id" : 8,
+              "request_status" : "COMPLETED",
+              "skip_failures" : false,
+              "skip_service_check_failures" : false,
+              "start_time" : 1463779267220,
+              "suspended" : false,
+              "to_version" : "2.3.6.0-3712",
+              "type" : "INTERNAL_REQUEST",
+              "upgrade_type" : "NON_ROLLING"
+            }
+          },
+          {
+            "href" : "http://bdavm079.svl.ibm.com:8080/api/v1/clusters/bi/upgrades/9",
+            "Upgrade" : {
+              "cluster_name" : "bi",
+              "create_time" : 1463780699654,
+              "direction" : "UPGRADE",
+              "downgrade_allowed" : true,
+              "end_time" : 1463780757685,
+              "exclusive" : false,
+              "from_version" : "2.3.6.0-3712",
+              "pack" : "nonrolling-upgrade-2.4",
+              "progress_percent" : 100.0,
+              "request_context" : "Upgrading to 2.4.0.0-169",
+              "request_id" : 9,
+              "request_status" : "ABORTED",
+              "skip_failures" : false,
+              "skip_service_check_failures" : false,
+              "start_time" : 1463780700670,
+              "suspended" : false,
+              "to_version" : "2.4.0.0-169",
+              "type" : "INTERNAL_REQUEST",
+              "upgrade_type" : "NON_ROLLING"
+            }
+          },
+          {
+            "href" : "http://bdavm079.svl.ibm.com:8080/api/v1/clusters/bi/upgrades/10",
+            "Upgrade" : {
+              "cluster_name" : "bi",
+              "create_time" : 1463780757799,
+              "direction" : "DOWNGRADE",
+              "downgrade_allowed" : true,
+              "end_time" : 1463780794009,
+              "exclusive" : false,
+              "from_version" : "2.3.6.0-3712",
+              "pack" : "nonrolling-upgrade-2.4",
+              "progress_percent" : 100.0,
+              "request_context" : "Downgrading to 2.3.6.0-3712",
+              "request_id" : 10,
+              "request_status" : "COMPLETED",
+              "skip_failures" : false,
+              "skip_service_check_failures" : false,
+              "start_time" : 1463780758807,
+              "suspended" : false,
+              "to_version" : "2.3.6.0-3712",
+              "type" : "INTERNAL_REQUEST",
+              "upgrade_type" : "NON_ROLLING"
+            }
+          },
+          {
+            "href" : "http://bdavm079.svl.ibm.com:8080/api/v1/clusters/bi/upgrades/11",
+            "Upgrade" : {
+              "cluster_name" : "bi",
+              "create_time" : 1463781287967,
+              "direction" : "UPGRADE",
+              "downgrade_allowed" : true,
+              "end_time" : 1463781341452,
+              "exclusive" : false,
+              "from_version" : "2.3.6.0-3712",
+              "pack" : "nonrolling-upgrade-2.4",
+              "progress_percent" : 100.0,
+              "request_context" : "Upgrading to 2.4.0.0-169",
+              "request_id" : 11,
+              "request_status" : "ABORTED",
+              "skip_failures" : false,
+              "skip_service_check_failures" : false,
+              "start_time" : 1463781288984,
+              "suspended" : false,
+              "to_version" : "2.4.0.0-169",
+              "type" : "INTERNAL_REQUEST",
+              "upgrade_type" : "NON_ROLLING"
+            }
+          },
+          {
+            "href" : "http://bdavm079.svl.ibm.com:8080/api/v1/clusters/bi/upgrades/12",
+            "Upgrade" : {
+              "cluster_name" : "bi",
+              "create_time" : 1463781341576,
+              "direction" : "DOWNGRADE",
+              "downgrade_allowed" : true,
+              "end_time" : 1463781371778,
+              "exclusive" : false,
+              "from_version" : "2.3.6.0-3712",
+              "pack" : "nonrolling-upgrade-2.4",
+              "progress_percent" : 100.0,
+              "request_context" : "Downgrading to 2.3.6.0-3712",
+              "request_id" : 12,
+              "request_status" : "COMPLETED",
+              "skip_failures" : false,
+              "skip_service_check_failures" : false,
+              "start_time" : 1463781342585,
+              "suspended" : false,
+              "to_version" : "2.3.6.0-3712",
+              "type" : "INTERNAL_REQUEST",
+              "upgrade_type" : "NON_ROLLING"
+            }
+          },
+          {
+            "href" : "http://bdavm079.svl.ibm.com:8080/api/v1/clusters/bi/upgrades/13",
+            "Upgrade" : {
+              "cluster_name" : "bi",
+              "create_time" : 1464120656181,
+              "direction" : "UPGRADE",
+              "downgrade_allowed" : true,
+              "end_time" : 1464120881477,
+              "exclusive" : false,
+              "from_version" : "2.3.6.0-3712",
+              "pack" : "upgrade-2.4",
+              "progress_percent" : 100.0,
+              "request_context" : "Upgrading to 2.4.0.0-169",
+              "request_id" : 13,
+              "request_status" : "ABORTED",
+              "skip_failures" : false,
+              "skip_service_check_failures" : false,
+              "start_time" : 1464120657198,
+              "suspended" : false,
+              "to_version" : "2.4.0.0-169",
+              "type" : "INTERNAL_REQUEST",
+              "upgrade_type" : "ROLLING"
+            }
+          },
+          {
+            "href" : "http://bdavm079.svl.ibm.com:8080/api/v1/clusters/bi/upgrades/14",
+            "Upgrade" : {
+              "cluster_name" : "bi",
+              "create_time" : 1464120881574,
+              "direction" : "DOWNGRADE",
+              "downgrade_allowed" : true,
+              "end_time" : 1464120918774,
+              "exclusive" : false,
+              "from_version" : "2.3.6.0-3712",
+              "pack" : "upgrade-2.4",
+              "progress_percent" : 100.0,
+              "request_context" : "Downgrading to 2.3.6.0-3712",
+              "request_id" : 14,
+              "request_status" : "COMPLETED",
+              "skip_failures" : false,
+              "skip_service_check_failures" : false,
+              "start_time" : 1464120882580,
+              "suspended" : false,
+              "to_version" : "2.3.6.0-3712",
+              "type" : "INTERNAL_REQUEST",
+              "upgrade_type" : "ROLLING"
+            }
+          },
+          {
+            "href" : "http://bdavm079.svl.ibm.com:8080/api/v1/clusters/bi/upgrades/15",
+            "Upgrade" : {
+              "cluster_name" : "bi",
+              "create_time" : 1464120943986,
+              "direction" : "UPGRADE",
+              "downgrade_allowed" : true,
+              "end_time" : 1464121132856,
+              "exclusive" : false,
+              "from_version" : "2.3.6.0-3712",
+              "pack" : "upgrade-2.4",
+              "progress_percent" : 100.0,
+              "request_context" : "Upgrading to 2.4.0.0-169",
+              "request_id" : 15,
+              "request_status" : "ABORTED",
+              "skip_failures" : false,
+              "skip_service_check_failures" : false,
+              "start_time" : 1464120945002,
+              "suspended" : false,
+              "to_version" : "2.4.0.0-169",
+              "type" : "INTERNAL_REQUEST",
+              "upgrade_type" : "ROLLING"
+            }
+          },
+          {
+            "href" : "http://bdavm079.svl.ibm.com:8080/api/v1/clusters/bi/upgrades/16",
+            "Upgrade" : {
+              "cluster_name" : "bi",
+              "create_time" : 1464121132981,
+              "direction" : "DOWNGRADE",
+              "downgrade_allowed" : true,
+              "end_time" : 1464121167178,
+              "exclusive" : false,
+              "from_version" : "2.3.6.0-3712",
+              "pack" : "upgrade-2.4",
+              "progress_percent" : 100.0,
+              "request_context" : "Downgrading to 2.3.6.0-3712",
+              "request_id" : 16,
+              "request_status" : "COMPLETED",
+              "skip_failures" : false,
+              "skip_service_check_failures" : false,
+              "start_time" : 1464121133988,
+              "suspended" : false,
+              "to_version" : "2.3.6.0-3712",
+              "type" : "INTERNAL_REQUEST",
+              "upgrade_type" : "ROLLING"
+            }
+          },
+          {
+            "href" : "http://bdavm079.svl.ibm.com:8080/api/v1/clusters/bi/upgrades/17",
+            "Upgrade" : {
+              "cluster_name" : "bi",
+              "create_time" : 1464121207511,
+              "direction" : "UPGRADE",
+              "downgrade_allowed" : true,
+              "end_time" : 1464121301821,
+              "exclusive" : false,
+              "from_version" : "2.3.6.0-3712",
+              "pack" : "nonrolling-upgrade-2.4",
+              "progress_percent" : 100.0,
+              "request_context" : "Upgrading to 2.4.0.0-169",
+              "request_id" : 17,
+              "request_status" : "ABORTED",
+              "skip_failures" : false,
+              "skip_service_check_failures" : false,
+              "start_time" : 1464121208524,
+              "suspended" : false,
+              "to_version" : "2.4.0.0-169",
+              "type" : "INTERNAL_REQUEST",
+              "upgrade_type" : "NON_ROLLING"
+            }
+          },
+          {
+            "href" : "http://bdavm079.svl.ibm.com:8080/api/v1/clusters/bi/upgrades/18",
+            "Upgrade" : {
+              "cluster_name" : "bi",
+              "create_time" : 1464121301933,
+              "direction" : "DOWNGRADE",
+              "downgrade_allowed" : true,
+              "end_time" : 1464121336149,
+              "exclusive" : false,
+              "from_version" : "2.3.6.0-3712",
+              "pack" : "nonrolling-upgrade-2.4",
+              "progress_percent" : 100.0,
+              "request_context" : "Downgrading to 2.3.6.0-3712",
+              "request_id" : 18,
+              "request_status" : "COMPLETED",
+              "skip_failures" : false,
+              "skip_service_check_failures" : false,
+              "start_time" : 1464121302941,
+              "suspended" : false,
+              "to_version" : "2.3.6.0-3712",
+              "type" : "INTERNAL_REQUEST",
+              "upgrade_type" : "NON_ROLLING"
+            }
+          }
+        ]
+      };
+
+    var upgradeParseResult = {
+        'clusterName':'bi',
+        'createTime':1464121301933,
+        "direction" : "DOWNGRADE",
+        "downgradeAllowed" : true,
+        "endTime" : 1464121336149,
+        "fromVersion" : "2.3.6.0-3712",
+        "requestId" : 18,
+        "requestStatus" : "COMPLETED",
+        "skipFailures" : false,
+        "skipServiceCheckFailures" : false,
+        "startTime" : 1464121302941,
+        "toVersion" : "2.3.6.0-3712",
+        "upgradeType" : "NON_ROLLING"
+    };
+
+    beforeEach(function () {
+      App.resetDsStoreTypeMap(App.StackUpgradeHistory);
+      sinon.stub(App.store, 'commit', Em.K);
+    });
+
+    afterEach(function(){
+      App.store.commit.restore();
+    });
+
+    it('Parse upgrade records returned by the Ambari server', function () {
+      App.stackUpgradeHistoryMapper.map(data);
+      var allRecords = App.StackUpgradeHistory.find();
+      var upgrades = allRecords.toArray();
+      expect(upgrades.length).to.eql(12);
+      var totalDowngrades = 0;
+      var totalUpgrades = 0;
+      upgrades.forEach(function(upgrade){
+        var direction = upgrade.get('direction');
+        if ('DOWNGRADE' === direction){
+          totalDowngrades++;
+        }
+        if ('UPGRADE' === direction){
+          totalUpgrades++;
+        }
+      });
+      expect(totalUpgrades).to.eql(6);
+      expect(totalDowngrades).to.eql(6);
+
+      var record = App.StackUpgradeHistory.find().findProperty('requestId', 18);
+      Em.keys(upgradeParseResult).forEach(function (key) {
+        expect(record.get(key)).to.eql(upgradeParseResult[key]);
+      });
+    });
+  });
+});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/test/models/finished_upgrade_entity_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/models/finished_upgrade_entity_test.js b/ambari-web/test/models/finished_upgrade_entity_test.js
new file mode 100644
index 0000000..a2a9527
--- /dev/null
+++ b/ambari-web/test/models/finished_upgrade_entity_test.js
@@ -0,0 +1,197 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var App = require('app');
+require('models/finished_upgrade_entity');
+
+function getModel() {
+  return App.finishedUpgradeEntity.create();
+}
+
+describe('App.finishedUpgradeEntity', function () {
+  var model;
+
+  beforeEach(function () {
+    model = getModel();
+  });
+
+  App.TestAliases.testAsComputedNotEqual(getModel(), 'isVisible', 'status', 'PENDING');
+
+  describe("#progress", function() {
+    it("progress_percent = 1.9", function() {
+      model.set('progress_percent', 1.9);
+      model.propertyDidChange('progress');
+      expect(model.get('progress')).to.equal(1);
+    });
+    it("progress_percent = 1", function() {
+      model.set('progress_percent', 1);
+      model.propertyDidChange('progress');
+      expect(model.get('progress')).to.equal(1);
+    });
+  });
+
+  describe("#isActive", function() {
+    it("status IN_PROGRESS", function() {
+      model.set('status', 'IN_PROGRESS');
+      model.propertyDidChange('isActive');
+      expect(model.get('isActive')).to.be.true;
+    });
+    it("status PENDING", function() {
+      model.set('status', 'PENDING');
+      model.propertyDidChange('isActive');
+      expect(model.get('isActive')).to.be.false;
+    });
+  });
+
+  describe('#isExpandableGroup', function () {
+
+    var cases = [
+      {
+        input: {
+          type: 'ITEM'
+        },
+        isExpandableGroup: false,
+        title: 'not upgrade group'
+      },
+      {
+        input: {
+          type: 'GROUP',
+          status: 'PENDING',
+          hasExpandableItems: false
+        },
+        isExpandableGroup: false,
+        title: 'pending upgrade group without expandable items'
+      },
+      {
+        input: {
+          type: 'GROUP',
+          status: 'ABORTED',
+          hasExpandableItems: false
+        },
+        isExpandableGroup: true,
+        title: 'aborted upgrade group without expandable items'
+      },
+      {
+        input: {
+          type: 'GROUP',
+          status: 'ABORTED',
+          hasExpandableItems: true
+        },
+        isExpandableGroup: true,
+        title: 'aborted upgrade group with expandable items'
+      },
+      {
+        input: {
+          type: 'GROUP',
+          status: 'IN_PROGRESS',
+          hasExpandableItems: false
+        },
+        isExpandableGroup: true,
+        title: 'active upgrade group'
+      }
+    ];
+
+    cases.forEach(function (item) {
+      it(item.title, function () {
+        model.setProperties(item.input);
+        expect(model.get('isExpandableGroup')).to.equal(item.isExpandableGroup);
+      });
+    });
+
+  });
+
+  describe('#upgradeGroupStatus', function () {
+
+    var cases = [
+      {
+        input: {
+          type: 'ITEM',
+          upgradeSuspended: false
+        },
+        upgradeGroupStatus: undefined,
+        title: 'not upgrade group'
+      },
+      {
+        input: {
+          type: 'GROUP',
+          status: 'PENDING',
+          hasExpandableItems: false,
+          upgradeSuspended: false
+        },
+        upgradeGroupStatus: 'PENDING',
+        title: 'pending upgrade group'
+      },
+      {
+        input: {
+          type: 'GROUP',
+          status: 'PENDING',
+          hasExpandableItems: true,
+          upgradeSuspended: false
+        },
+        upgradeGroupStatus: 'SUBITEM_FAILED',
+        title: 'pending upgrade group with expandable items'
+      },
+      {
+        input: {
+          type: 'GROUP',
+          status: 'ABORTED',
+          hasExpandableItems: false,
+          upgradeSuspended: false
+        },
+        upgradeGroupStatus: 'ABORTED',
+        title: 'aborted upgrade group without expandable items'
+      },
+      {
+        input: {
+          type: 'GROUP',
+          status: 'ABORTED',
+          hasExpandableItems: true,
+          upgradeSuspended: true
+        },
+        upgradeGroupStatus: 'ABORTED',
+        title: 'aborted upgrade group with expandable items'
+      },
+      {
+        input: {
+          type: 'GROUP',
+          status: 'IN_PROGRESS',
+          hasExpandableItems: false,
+          upgradeSuspended: false
+        },
+        upgradeGroupStatus: 'IN_PROGRESS',
+        title: 'active upgrade'
+      }
+    ];
+
+    beforeEach(function() {
+      this.mock = sinon.stub(App, 'get');
+    });
+    afterEach(function() {
+      this.mock.restore();
+    });
+
+    cases.forEach(function (item) {
+      it(item.title, function () {
+        this.mock.returns(item.input.upgradeSuspended);
+        model.setProperties(item.input);
+        expect(model.get('upgradeGroupStatus')).to.equal(item.upgradeGroupStatus);
+      });
+    });
+
+  });
+});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/test/views/main/admin/stack_upgrade/upgrade_history_details_view_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/admin/stack_upgrade/upgrade_history_details_view_test.js b/ambari-web/test/views/main/admin/stack_upgrade/upgrade_history_details_view_test.js
new file mode 100644
index 0000000..ab38e34
--- /dev/null
+++ b/ambari-web/test/views/main/admin/stack_upgrade/upgrade_history_details_view_test.js
@@ -0,0 +1,248 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+var App = require('app');
+require('views/main/admin/stack_upgrade/upgrade_history_details_view');
+var testHelpers = require('test/helpers');
+
+describe('App.MainAdminStackUpgradeHistoryDetailsView', function () {
+  var view;
+
+  beforeEach(function () {
+    view = App.MainAdminStackUpgradeHistoryDetailsView.create({
+      controller: {
+        currentUpgradeRecord: App.StackUpgradeHistory.createRecord({
+          'requestId':1,
+          'direction':'DOWNGRADE',
+        }),
+      }
+    });
+  });
+
+  afterEach(function () {
+    view.destroy();
+  });
+
+  describe("#overallProgress", function () {
+    it("progress is 1.9", function () {
+      view.set('controller.upgradeData', {
+        Upgrade: {
+          progress_percent: 1.9
+        }
+      });
+      expect(view.get('overallProgress')).to.equal(1);
+    });
+    it("progress is 1", function () {
+      view.set('controller.upgradeData', {
+        Upgrade: {
+          progress_percent: 1
+        }
+      });
+      expect(view.get('overallProgress')).to.equal(1);
+    });
+  });
+
+  describe("#willInsertElement()", function() {
+    beforeEach(function () {
+      sinon.spy(view.get('controller'), 'loadStackUpgradeRecord');
+    });
+    afterEach(function () {
+      view.get('controller').loadStackUpgradeRecord.restore();
+    });
+    it("load data by controller is called once", function() {
+      view.set('controller.currentUpgradeRecord', App.StackUpgradeHistory.createRecord({
+        'requestId':1,
+        'direction':'DOWNGRADE',
+      }));
+      view.willInsertElement();
+      expect(view.get('controller').loadStackUpgradeRecord.calledOnce).to.be.true;
+    });
+  });
+
+  describe("#willDestroyElement()", function () {
+    it("reset ready flag", function () {
+      view.set('isReady', true);
+      view.willDestroyElement();
+      expect(view.get('isReady')).to.be.false;
+    });
+  });
+
+  describe("#upgradeStatusLabel", function () {
+    beforeEach(function () {
+      Em.setFullPath(view, 'controller.upgradeData.Upgrade', {});
+    });
+
+    [
+      {
+        data: {
+          status: 'QUEUED',
+          isDowngrade: false
+        },
+        result: Em.I18n.t('admin.stackUpgrade.state.inProgress')
+      },
+      {
+        data: {
+          status: 'PENDING',
+          isDowngrade: false
+        },
+        result: Em.I18n.t('admin.stackUpgrade.state.inProgress')
+      },
+      {
+        data: {
+          status: 'IN_PROGRESS',
+          isDowngrade: false
+        },
+        result: Em.I18n.t('admin.stackUpgrade.state.inProgress')
+      },
+      {
+        data: {
+          status: 'COMPLETED',
+          isDowngrade: false
+        },
+        result: Em.I18n.t('admin.stackUpgrade.state.completed')
+      },
+      {
+        data: {
+          status: 'ABORTED',
+          isDowngrade: false
+        },
+        result: Em.I18n.t('admin.stackUpgrade.state.paused')
+      },
+      {
+        data: {
+          status: 'TIMEDOUT',
+          isDowngrade: false
+        },
+        result: Em.I18n.t('admin.stackUpgrade.state.paused')
+      },
+      {
+        data: {
+          status: 'FAILED',
+          isDowngrade: false
+        },
+        result: Em.I18n.t('admin.stackUpgrade.state.paused')
+      },
+      {
+        data: {
+          status: 'HOLDING_FAILED',
+          isDowngrade: false
+        },
+        result: Em.I18n.t('admin.stackUpgrade.state.paused')
+      },
+      {
+        data: {
+          status: 'HOLDING_TIMEDOUT',
+          isDowngrade: false
+        },
+        result: Em.I18n.t('admin.stackUpgrade.state.paused')
+      },
+      {
+        data: {
+          status: 'HOLDING',
+          isDowngrade: false
+        },
+        result: Em.I18n.t('admin.stackUpgrade.state.paused')
+      },
+      {
+        data: {
+          status: '',
+          isDowngrade: false
+        },
+        result: ''
+      },
+      {
+        data: {
+          status: 'QUEUED',
+          isDowngrade: true
+        },
+        result: Em.I18n.t('admin.stackUpgrade.state.inProgress.downgrade')
+      },
+      {
+        data: {
+          status: 'PENDING',
+          isDowngrade: true
+        },
+        result: Em.I18n.t('admin.stackUpgrade.state.inProgress.downgrade')
+      },
+      {
+        data: {
+          status: 'IN_PROGRESS',
+          isDowngrade: true
+        },
+        result: Em.I18n.t('admin.stackUpgrade.state.inProgress.downgrade')
+      },
+      {
+        data: {
+          status: 'COMPLETED',
+          isDowngrade: true
+        },
+        result: Em.I18n.t('admin.stackUpgrade.state.completed.downgrade')
+      },
+      {
+        data: {
+          status: 'ABORTED',
+          isDowngrade: true
+        },
+        result: Em.I18n.t('admin.stackUpgrade.state.paused.downgrade')
+      },
+      {
+        data: {
+          status: 'TIMEDOUT',
+          isDowngrade: true
+        },
+        result: Em.I18n.t('admin.stackUpgrade.state.paused.downgrade')
+      },
+      {
+        data: {
+          status: 'FAILED',
+          isDowngrade: true
+        },
+        result: Em.I18n.t('admin.stackUpgrade.state.paused.downgrade')
+      },
+      {
+        data: {
+          status: 'HOLDING_FAILED',
+          isDowngrade: true
+        },
+        result: Em.I18n.t('admin.stackUpgrade.state.paused.downgrade')
+      },
+      {
+        data: {
+          status: 'HOLDING_TIMEDOUT',
+          isDowngrade: true
+        },
+        result: Em.I18n.t('admin.stackUpgrade.state.paused.downgrade')
+      },
+      {
+        data: {
+          status: 'HOLDING',
+          isDowngrade: true
+        },
+        result: Em.I18n.t('admin.stackUpgrade.state.paused.downgrade')
+      }
+    ].forEach(function (test) {
+        it('status = ' + test.data.status + ", isDowngrade = " + test.data.isDowngrade, function () {
+          view.set('controller.isDowngrade', test.data.isDowngrade);
+          view.set('controller.upgradeData.Upgrade.request_status', test.data.status);
+          view.propertyDidChange('upgradeStatusLabel');
+          expect(view.get('upgradeStatusLabel')).to.equal(test.result);
+        });
+      });
+  });
+});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ef418377/ambari-web/test/views/main/admin/stack_upgrade/upgrade_history_view_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/admin/stack_upgrade/upgrade_history_view_test.js b/ambari-web/test/views/main/admin/stack_upgrade/upgrade_history_view_test.js
new file mode 100644
index 0000000..d68a854
--- /dev/null
+++ b/ambari-web/test/views/main/admin/stack_upgrade/upgrade_history_view_test.js
@@ -0,0 +1,173 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+var App = require('app');
+require('views/main/admin/stack_upgrade/upgrade_history_view');
+var testHelpers = require('test/helpers');
+
+describe('App.MainAdminStackUpgradeHistoryView', function () {
+  var view;
+
+  beforeEach(function () {
+    view = App.MainAdminStackUpgradeHistoryView.create();
+  });
+
+  afterEach(function () {
+    view.destroy();
+  });
+
+  describe("#filterBy()", function () {
+    var records = [
+        Em.Object.create({
+          requestStatus: "ABORTED",
+          direction: "UPGRADE"
+        }),
+        Em.Object.create({
+          requestStatus: "ABORTED",
+          direction: "DOWNGRADE"
+        }),
+        Em.Object.create({
+          requestStatus: "COMPLETED",
+          direction: "UPGRADE"
+        }),
+        Em.Object.create({
+          requestStatus: "COMPLETED",
+          direction: "DOWNGRADE"
+        })
+      ];
+
+
+    beforeEach(function () {
+      this.mock = sinon.stub(App.StackUpgradeHistory, 'find');
+    });
+
+    afterEach(function () {
+      this.mock.restore();
+    });
+
+    it('All should return all records', function(){
+      this.mock.returns(records);
+      var filteredResults = view.filterBy('ALL');
+      expect(filteredResults.length).to.equal(4);
+    });
+
+    it('Filter aborted upgrades', function(){
+      this.mock.returns(records);
+      var filteredResults = view.filterBy('UPGRADE_ABORTED');
+      expect(filteredResults.length).to.equal(1);
+    });
+
+    it('Filter completed upgrades', function(){
+      this.mock.returns(records);
+      var filteredResults = view.filterBy('UPGRADE_COMPLETED');
+      expect(filteredResults.length).to.equal(1);
+    });
+
+    it('Filter aborted downgrades', function(){
+      this.mock.returns(records);
+      var filteredResults = view.filterBy('DOWNGRADE_ABORTED');
+      expect(filteredResults.length).to.equal(1);
+    });
+
+    it('Filter completed downgrades', function(){
+      this.mock.returns(records);
+      var filteredResults = view.filterBy('DOWNGRADE_COMPLETED');
+      expect(filteredResults.length).to.equal(1);
+    });
+  });
+
+  describe("#didInsertElement()", function() {
+    beforeEach(function () {
+      sinon.stub(view, 'observesCategories', Em.K);
+    });
+    afterEach(function () {
+      view.observesCategories.restore();
+    });
+    it("observesCategories is called once", function() {
+      view.didInsertElement();
+      expect(view.observesCategories.calledOnce).to.be.true;
+    });
+  });
+
+  describe("#observesCategories()", function () {
+    var mock = {format: Em.K};
+    beforeEach(function () {
+      sinon.stub(Em.I18n, 't').returns(mock);
+      sinon.stub(mock, 'format').returns('label');
+      sinon.stub(view, 'filterBy').returns([]);
+      view.set('categories', [
+        Em.Object.create({
+          labelKey: 'labelKey',
+          value: 'value',
+          isSelected: false
+        })
+      ]);
+      view.observesCategories();
+    });
+    afterEach(function () {
+      Em.I18n.t.restore();
+      mock.format.restore();
+      view.filterBy.restore();
+    });
+    it("categories[0].label is updated", function () {
+      expect(view.get('categories')[0].get('label')).to.equal('label');
+    });
+  });
+
+  describe("#selectCategory()", function() {
+    var event;
+    beforeEach(function () {
+      event = {
+        context: Em.Object.create({
+          isSelected: false,
+          value: 'ALL',
+        })
+      };
+      view.set('categories', [
+        Em.Object.create({
+          isSelected: true,
+          value: 'UPGRADE_COMPLETED',
+        }),
+        event.context
+      ]);
+      view.selectCategory(event);
+    });
+    afterEach(function () {
+    });
+    it("categories[0].isSelected false", function() {
+      expect(view.get('categories')[0].get('isSelected')).to.be.false;
+    });
+    it("isSelected is true", function() {
+      expect(event.context.get('isSelected')).to.be.true;
+    });
+  });
+
+  describe("#willInsertElement()", function() {
+    beforeEach(function () {
+      sinon.spy(view.get('controller'), 'loadStackUpgradeHistoryToModel');
+    });
+    afterEach(function () {
+      view.get('controller').loadStackUpgradeHistoryToModel.restore();
+    });
+    it("load data by controller is called once", function() {
+      view.willInsertElement();
+      expect(view.get('controller').loadStackUpgradeHistoryToModel.calledOnce).to.be.true;
+    });
+  });
+});


[25/50] ambari git commit: AMBARI-18980. Add manual confirm task before finalizing host-ordered upgrade (Duc Le via ncole)

Posted by sw...@apache.org.
AMBARI-18980. Add manual confirm task before finalizing host-ordered upgrade (Duc Le via ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/aba96406
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/aba96406
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/aba96406

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: aba9640623d3dd7c33d8544d15876926fece5ab9
Parents: 2686832
Author: Nate Cole <nc...@hortonworks.com>
Authored: Tue Nov 29 12:45:47 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Tue Nov 29 12:54:15 2016 -0500

----------------------------------------------------------------------
 .../stacks/HDP/2.5/upgrades/host-upgrade-2.5.xml         | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/aba96406/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/host-upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/host-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/host-upgrade-2.5.xml
index 72241eea..64c79a8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/host-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/host-upgrade-2.5.xml
@@ -117,14 +117,21 @@
     <group xsi:type="cluster" name="POST_CLUSTER" title="Finalize {{direction.text.proper}}">
       <skippable>true</skippable>
       <supports-auto-skip-failure>false</supports-auto-skip-failure>
-      
+
+      <execute-stage title="Confirm Finalize">
+        <direction>UPGRADE</direction>
+        <task xsi:type="manual">
+          <message>Please confirm you are ready to finalize.</message>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HDFS" component="NAMENODE" title="Execute HDFS Finalize">
         <task xsi:type="execute" hosts="master">
           <script>scripts/namenode.py</script>
           <function>finalize_rolling_upgrade</function>
         </task>
       </execute-stage>
-      
+
       <execute-stage title="Save Cluster State">
         <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FinalizeUpgradeAction">
         </task>


[39/50] ambari git commit: AMBARI-18987 A general pre-upgrade check for installed services that cannot be upgraded (dili)

Posted by sw...@apache.org.
AMBARI-18987 A general pre-upgrade check for installed services that cannot be upgraded (dili)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/502cffba
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/502cffba
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/502cffba

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 502cffba182f33f8e5d98cc986d90b6d3b4c602a
Parents: 841a064
Author: Di Li <di...@apache.org>
Authored: Wed Nov 30 12:49:03 2016 -0500
Committer: Di Li <di...@apache.org>
Committed: Wed Nov 30 13:04:59 2016 -0500

----------------------------------------------------------------------
 .../server/checks/ServicePresenceCheck.java     | 55 +++++++++-----------
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml |  5 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml |  5 +-
 .../stacks/HDP/2.3/upgrades/upgrade-2.5.xml     |  5 +-
 .../stacks/HDP/2.3/upgrades/upgrade-2.6.xml     |  5 +-
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml |  5 +-
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml |  5 +-
 .../stacks/HDP/2.4/upgrades/upgrade-2.5.xml     |  5 +-
 .../stacks/HDP/2.4/upgrades/upgrade-2.6.xml     |  5 +-
 9 files changed, 57 insertions(+), 38 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/502cffba/ambari-server/src/main/java/org/apache/ambari/server/checks/ServicePresenceCheck.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/ServicePresenceCheck.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/ServicePresenceCheck.java
index 0f4eeb1..4642b88 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/ServicePresenceCheck.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/ServicePresenceCheck.java
@@ -19,7 +19,6 @@ package org.apache.ambari.server.checks;
 
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -47,7 +46,7 @@ public class ServicePresenceCheck extends AbstractCheckDescriptor{
 
   private static final Logger LOG = LoggerFactory.getLogger(ServicePresenceCheck.class);
 
-  static final String KEY_SERVICE_REMOVED = "servcie_removed";
+  static final String KEY_SERVICE_REMOVED = "service_removed";
   /*
    * List of services that do not support upgrade
    * services must be removed before the stack upgrade
@@ -77,28 +76,26 @@ public class ServicePresenceCheck extends AbstractCheckDescriptor{
     Map<String, String> removedServices = getRemovedServices(request);
     List<String> failReasons = new ArrayList<>();
 
-    if(null != noUpgradeSupportServices && !noUpgradeSupportServices.isEmpty()){
-      String reason = getFailReason(prerequisiteCheck, request);
-      for(String service: noUpgradeSupportServices){
-        if (installedServices.contains(service.toUpperCase())){
-          prerequisiteCheck.getFailedOn().add(service);
-          String msg = String.format(reason, service, service);
-          failReasons.add(msg);
-        }
+    String reason = getFailReason(prerequisiteCheck, request);
+    for(String service: noUpgradeSupportServices){
+      if (installedServices.contains(service.toUpperCase())){
+        prerequisiteCheck.getFailedOn().add(service);
+        String msg = String.format(reason, service, service);
+        failReasons.add(msg);
       }
     }
-    if(null != removedServices){
-      String reason = getFailReason(KEY_SERVICE_REMOVED, prerequisiteCheck, request);
-      for (Map.Entry<String, String> entry : removedServices.entrySet()) {
-        String removedService = entry.getKey();
-        if(installedServices.contains(removedService.toUpperCase())){
-          prerequisiteCheck.getFailedOn().add(removedService);
-          String newService = entry.getValue();
-          String msg = String.format(reason, removedService, newService);
-          failReasons.add(msg);
-        }
+
+    reason = getFailReason(KEY_SERVICE_REMOVED, prerequisiteCheck, request);
+    for (Map.Entry<String, String> entry : removedServices.entrySet()) {
+      String removedService = entry.getKey();
+      if(installedServices.contains(removedService.toUpperCase())){
+        prerequisiteCheck.getFailedOn().add(removedService);
+        String newService = entry.getValue();
+        String msg = String.format(reason, removedService, newService);
+        failReasons.add(msg);
       }
     }
+
     if(!failReasons.isEmpty()){
       prerequisiteCheck.setStatus(PrereqCheckStatus.FAIL);
       prerequisiteCheck.setFailReason(StringUtils.join(failReasons, '\n'));
@@ -126,52 +123,50 @@ public class ServicePresenceCheck extends AbstractCheckDescriptor{
    * @return service names
    * */
   private List<String> getNoUpgradeSupportServices(PrereqCheckRequest request){
+    List<String> result = new ArrayList<String>();
     String value = getPropertyValue(request, NO_UPGRADE_SUPPORT_SERVICES_PROPERTY_NAME);
     if (null != value){
       String[] services = value.split(",");
-      List<String> result = new ArrayList<String>();
       for(String service: services){
         service = service.trim();
         if (!service.isEmpty()){
           result.add(service);
         }
       }
-      return result;
-    } else {
-      return null;
     }
+    return result;
   }
 
   /**
    * @return service names and new service names map
    * */
   private Map<String, String> getRemovedServices(PrereqCheckRequest request) throws AmbariException{
+    Map<String, String> result = new LinkedHashMap<String, String>();
     String value = getPropertyValue(request, REMOVED_SERVICES_PROPERTY_NAME);
     String newValue = getPropertyValue(request, NEW_SERVICES_PROPERTY_NAME);
     if(value == null && newValue == null){
-      return null; //no need to check removed services as they are not specified in the upgrade xml file.
+      return result; //no need to check removed services as they are not specified in the upgrade xml file.
     } else {
       if (value == null || newValue == null){
-        throw new AmbariException("Removed services must be paired with new services list.");
+        throw new AmbariException(String.format("Both %s and %s list must be specified in the upgrade XML file.", REMOVED_SERVICES_PROPERTY_NAME, NEW_SERVICES_PROPERTY_NAME));
       } else {
         List<String> oldServices = Arrays.asList(value.split(","));
         List<String> newServices = Arrays.asList(newValue.split(","));
         if (oldServices.size() != newServices.size()){
-          throw new AmbariException("Removed services must be paired with new services list.");
+          throw new AmbariException(String.format("%s must have the same number of services as the %s list.", NEW_SERVICES_PROPERTY_NAME, REMOVED_SERVICES_PROPERTY_NAME));
         } else {
-          Map<String, String> result = new LinkedHashMap<String, String>();
           for (int i = 0; i < oldServices.size(); i++){
             String oldService = oldServices.get(i).trim();
             String newService = newServices.get(i).trim();
             if (oldService.isEmpty() || newService.isEmpty()) {
-              throw new AmbariException("Removed services must be paired with new services list.");
+              throw new AmbariException(String.format("Make sure both %s and %s list only contain comma separated list of services.", NEW_SERVICES_PROPERTY_NAME, REMOVED_SERVICES_PROPERTY_NAME));
             } else {
               result.put(oldService, newService);
             }
           }
-          return result;
         }
       }
     }
+    return result;
   }
 }
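
For illustration, a minimal stand-alone sketch of the pairing behavior this refactored check relies on, assuming comma-separated "removed" and "new" service lists of equal length. The class and method names below are hypothetical, not Ambari code; the real check reads the two lists from upgrade-pack properties via getPropertyValue() and returns empty collections (never null) so callers can iterate unconditionally.

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class ServicePairingSketch {
      // Pairs each removed service with its replacement; returns an empty map
      // (never null) when nothing is declared in the upgrade XML.
      static Map<String, String> pairServices(String removedCsv, String newCsv) {
        Map<String, String> result = new LinkedHashMap<>();
        if (removedCsv == null || newCsv == null) {
          return result; // nothing declared, nothing to check
        }
        String[] removed = removedCsv.split(",");
        String[] added = newCsv.split(",");
        if (removed.length != added.length) {
          throw new IllegalArgumentException("Removed and new service lists must have the same length");
        }
        for (int i = 0; i < removed.length; i++) {
          result.put(removed[i].trim(), added[i].trim());
        }
        return result;
      }

      public static void main(String[] args) {
        // Prints {OLD_SERVICE=NEW_SERVICE, OTHER_OLD=OTHER_NEW}
        System.out.println(pairServices("OLD_SERVICE, OTHER_OLD", "NEW_SERVICE, OTHER_NEW"));
      }
    }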

http://git-wip-us.apache.org/repos/asf/ambari/blob/502cffba/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
index 7a15e0e..66c0a70 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
@@ -22,12 +22,15 @@
   <type>NON_ROLLING</type>
   <prerequisite-checks>
     <check>org.apache.ambari.server.checks.RangerAuditDbCheck</check>
-    <check>org.apache.ambari.server.checks.AtlasPresenceCheck</check>
+    <check>org.apache.ambari.server.checks.ServicePresenceCheck</check>
     <configuration>
       <!-- Configuration properties for all pre-reqs including required pre-reqs -->
       <check-properties name="org.apache.ambari.server.checks.HiveDynamicServiceDiscoveryCheck">
         <property name="min-failure-stack-version">HDP-2.3.0.0</property>
       </check-properties>
+      <check-properties name="org.apache.ambari.server.checks.ServicePresenceCheck">
+        <property name="no-upgrade-support-service-names">Atlas</property>
+      </check-properties>
     </configuration>
   </prerequisite-checks>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/502cffba/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
index fe2598b..019c76e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
@@ -22,12 +22,15 @@
   <type>NON_ROLLING</type>
   <prerequisite-checks>
     <check>org.apache.ambari.server.checks.RangerAuditDbCheck</check>
-    <check>org.apache.ambari.server.checks.AtlasPresenceCheck</check>
+    <check>org.apache.ambari.server.checks.ServicePresenceCheck</check>
     <configuration>
       <!-- Configuration properties for all pre-reqs including required pre-reqs -->
       <check-properties name="org.apache.ambari.server.checks.HiveDynamicServiceDiscoveryCheck">
         <property name="min-failure-stack-version">HDP-2.3.0.0</property>
       </check-properties>
+      <check-properties name="org.apache.ambari.server.checks.ServicePresenceCheck">
+        <property name="no-upgrade-support-service-names">Atlas</property>
+      </check-properties>
     </configuration>
   </prerequisite-checks>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/502cffba/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
index bbf5299..4d1b5f1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
@@ -33,7 +33,7 @@
     <check>org.apache.ambari.server.checks.ServicesYarnWorkPreservingCheck</check>
     <check>org.apache.ambari.server.checks.YarnRMHighAvailabilityCheck</check>
     <check>org.apache.ambari.server.checks.YarnTimelineServerStatePreservingCheck</check>
-    <check>org.apache.ambari.server.checks.AtlasPresenceCheck</check>
+    <check>org.apache.ambari.server.checks.ServicePresenceCheck</check>
     <check>org.apache.ambari.server.checks.RangerAuditDbCheck</check>
 
     <!-- Specific to HDP 2.5, Storm is not rolling -->
@@ -45,6 +45,9 @@
       <check-properties name="org.apache.ambari.server.checks.HiveDynamicServiceDiscoveryCheck">
         <property name="min-failure-stack-version">HDP-2.3.0.0</property>
       </check-properties>
+      <check-properties name="org.apache.ambari.server.checks.ServicePresenceCheck">
+        <property name="no-upgrade-support-service-names">Atlas</property>
+      </check-properties>
     </configuration>
   </prerequisite-checks>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/502cffba/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
index c5fdc99..02cef57 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
@@ -33,7 +33,7 @@
     <check>org.apache.ambari.server.checks.ServicesYarnWorkPreservingCheck</check>
     <check>org.apache.ambari.server.checks.YarnRMHighAvailabilityCheck</check>
     <check>org.apache.ambari.server.checks.YarnTimelineServerStatePreservingCheck</check>
-    <check>org.apache.ambari.server.checks.AtlasPresenceCheck</check>
+    <check>org.apache.ambari.server.checks.ServicePresenceCheck</check>
     <check>org.apache.ambari.server.checks.RangerAuditDbCheck</check>
 
     <!-- Specific to HDP 2.5, Storm is not rolling -->
@@ -45,6 +45,9 @@
       <check-properties name="org.apache.ambari.server.checks.HiveDynamicServiceDiscoveryCheck">
         <property name="min-failure-stack-version">HDP-2.3.0.0</property>
       </check-properties>
+      <check-properties name="org.apache.ambari.server.checks.ServicePresenceCheck">
+        <property name="no-upgrade-support-service-names">Atlas</property>
+      </check-properties>
     </configuration>
   </prerequisite-checks>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/502cffba/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
index a46ac9e..5183580 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
@@ -22,13 +22,16 @@
   <type>NON_ROLLING</type>
   <prerequisite-checks>
     <check>org.apache.ambari.server.checks.RangerAuditDbCheck</check>
-    <check>org.apache.ambari.server.checks.AtlasPresenceCheck</check>
+    <check>org.apache.ambari.server.checks.ServicePresenceCheck</check>
 
     <configuration>
       <!-- Configuration properties for all pre-reqs including required pre-reqs -->
       <check-properties name="org.apache.ambari.server.checks.HiveDynamicServiceDiscoveryCheck">
         <property name="min-failure-stack-version">HDP-2.3.0.0</property>
       </check-properties>
+      <check-properties name="org.apache.ambari.server.checks.ServicePresenceCheck">
+        <property name="no-upgrade-support-service-names">Atlas</property>
+      </check-properties>
     </configuration>
   </prerequisite-checks>
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/502cffba/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
index 6597cde..2a1ecf7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
@@ -22,13 +22,16 @@
   <type>NON_ROLLING</type>
   <prerequisite-checks>
     <check>org.apache.ambari.server.checks.RangerAuditDbCheck</check>
-    <check>org.apache.ambari.server.checks.AtlasPresenceCheck</check>
+    <check>org.apache.ambari.server.checks.ServicePresenceCheck</check>
 
     <configuration>
       <!-- Configuration properties for all pre-reqs including required pre-reqs -->
       <check-properties name="org.apache.ambari.server.checks.HiveDynamicServiceDiscoveryCheck">
         <property name="min-failure-stack-version">HDP-2.3.0.0</property>
       </check-properties>
+      <check-properties name="org.apache.ambari.server.checks.ServicePresenceCheck">
+        <property name="no-upgrade-support-service-names">Atlas</property>
+      </check-properties>
     </configuration>
   </prerequisite-checks>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/502cffba/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
index f5a5669..201150d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
@@ -34,7 +34,7 @@
     <check>org.apache.ambari.server.checks.ServicesYarnWorkPreservingCheck</check>
     <check>org.apache.ambari.server.checks.YarnRMHighAvailabilityCheck</check>
     <check>org.apache.ambari.server.checks.YarnTimelineServerStatePreservingCheck</check>
-    <check>org.apache.ambari.server.checks.AtlasPresenceCheck</check>
+    <check>org.apache.ambari.server.checks.ServicePresenceCheck</check>
     <check>org.apache.ambari.server.checks.RangerAuditDbCheck</check>
 
     <!-- Specific to HDP 2.5, Storm is not rolling -->
@@ -46,6 +46,9 @@
       <check-properties name="org.apache.ambari.server.checks.HiveDynamicServiceDiscoveryCheck">
         <property name="min-failure-stack-version">HDP-2.3.0.0</property>
       </check-properties>
+      <check-properties name="org.apache.ambari.server.checks.ServicePresenceCheck">
+        <property name="no-upgrade-support-service-names">Atlas</property>
+      </check-properties>
     </configuration>
   </prerequisite-checks>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/502cffba/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
index 406f38e..cb16953 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
@@ -34,7 +34,7 @@
     <check>org.apache.ambari.server.checks.ServicesYarnWorkPreservingCheck</check>
     <check>org.apache.ambari.server.checks.YarnRMHighAvailabilityCheck</check>
     <check>org.apache.ambari.server.checks.YarnTimelineServerStatePreservingCheck</check>
-    <check>org.apache.ambari.server.checks.AtlasPresenceCheck</check>
+    <check>org.apache.ambari.server.checks.ServicePresenceCheck</check>
     <check>org.apache.ambari.server.checks.RangerAuditDbCheck</check>
 
     <!-- Specific to HDP 2.5, Storm is not rolling -->
@@ -46,6 +46,9 @@
       <check-properties name="org.apache.ambari.server.checks.HiveDynamicServiceDiscoveryCheck">
         <property name="min-failure-stack-version">HDP-2.3.0.0</property>
       </check-properties>
+      <check-properties name="org.apache.ambari.server.checks.ServicePresenceCheck">
+        <property name="no-upgrade-support-service-names">Atlas</property>
+      </check-properties>
     </configuration>
   </prerequisite-checks>
 


[04/50] ambari git commit: AMBARI-18993. Perf: Status commands return INSTALLED even if component is in STARTED state. (vbrodetskyi)

Posted by sw...@apache.org.
AMBARI-18993. Perf: Status commands return INSTALLED even if component is in STARTED state. (vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4e4a1f92
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4e4a1f92
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4e4a1f92

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 4e4a1f92b48b0de703acbb1028c29b62a99d4c02
Parents: 8278384
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Sun Nov 27 19:17:58 2016 +0200
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Sun Nov 27 19:17:58 2016 +0200

----------------------------------------------------------------------
 ambari-agent/conf/unix/agent-multiplier.py | 14 ++++++++++++--
 1 file changed, 12 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4e4a1f92/ambari-agent/conf/unix/agent-multiplier.py
----------------------------------------------------------------------
diff --git a/ambari-agent/conf/unix/agent-multiplier.py b/ambari-agent/conf/unix/agent-multiplier.py
index dec79eb..7c0774c 100644
--- a/ambari-agent/conf/unix/agent-multiplier.py
+++ b/ambari-agent/conf/unix/agent-multiplier.py
@@ -59,10 +59,12 @@ class Multiplier:
     self.log_dir = "/var/log/ambari-agent"
     self.config_dir = "/etc/ambari-agent/conf"
     self.pid_file = "/var/run/ambari-agent/ambari-agent.pid"
+    self.prefix_dir = "/var/lib/ambari-agent/data"
 
     # Ambari Agent config file to use as a template
     # Will change hostname and port after copying
     self.source_config_file = "/etc/ambari-agent/conf/ambari-agent.ini"
+    self.source_version_file = "/var/lib/ambari-agent/data/version"
     self.base_ping_port = 5000
 
     self.start = 0
@@ -159,11 +161,12 @@ class Multiplier:
       host_config_dir = host_home_dir + self.config_dir
       host_pid_file = host_home_dir + self.pid_file
       host_pid_dir = os.path.dirname(host_pid_file)
+      host_prefix = host_home_dir + self.prefix_dir
 
       if self.verbose:
         print "Analyzing host %s with port %d" % (host_name, host.ping_port)
 
-      for dir in [host_home_dir, host_log_dir, host_config_dir, host_pid_dir]:
+      for dir in [host_home_dir, host_log_dir, host_config_dir, host_pid_dir, host_prefix]:
         if not os.path.isdir(dir):
           print "Creating dir %s" % (dir)
           os.makedirs(dir)
@@ -174,6 +177,12 @@ class Multiplier:
         print "Copying config file %s" % str(host_config_file)
         shutil.copyfile(self.source_config_file, host_config_file)
 
+      # Copy version file
+      version_file = os.path.join(host_prefix, "version")
+      if not os.path.isfile(version_file):
+        print "Copying version file %s" % str(version_file)
+        shutil.copyfile(self.source_version_file, version_file)
+
       # Create hostname.sh script to use custom FQDN for each agent.
       host_name_script = os.path.join(host_config_dir, "hostname.sh")
       self.create_host_name_script(host_name, host_name_script)
@@ -183,7 +192,8 @@ class Multiplier:
                      "hostname_script": host_name_script,
                      "public_hostname_script": host_name_script,
                      "logdir": host_log_dir,
-                     "piddir": host_pid_dir}
+                     "piddir": host_pid_dir,
+                     "prefix": host_prefix}
       self.change_config(host_config_file, config_dict)
 
       # Change /etc/hosts file by appending each hostname.


[10/50] ambari git commit: Revert "AMBARI-18976. Config History request execution time depends on config versions count. (mpapirkovskyy)"

Posted by sw...@apache.org.
Revert "AMBARI-18976. Config History request execution time depends on config versions count. (mpapirkovskyy)"

This reverts commit ce4d4fa768375aac631703dbfceac44fd8faa151.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/568d1e65
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/568d1e65
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/568d1e65

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 568d1e656b1f1c651e594a54f3b3744f62984653
Parents: 1df9b46
Author: Myroslav Papirkovskyi <mp...@hortonworks.com>
Authored: Mon Nov 28 18:41:20 2016 +0200
Committer: Myroslav Papirkovskyi <mp...@hortonworks.com>
Committed: Mon Nov 28 18:41:20 2016 +0200

----------------------------------------------------------------------
 .../server/state/cluster/ClusterImpl.java       | 42 +++++++++++++++-----
 1 file changed, 32 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/568d1e65/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 7bf24ce..8b157c7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -2534,10 +2534,12 @@ public class ClusterImpl implements Cluster {
     if (serviceConfigEntity.getGroupId() == null) {
       Collection<String> configTypes = serviceConfigTypes.get(serviceName);
       List<ClusterConfigMappingEntity> mappingEntities =
-          clusterDAO.getSelectedConfigMappingByTypes(getClusterId(), new ArrayList<>(configTypes));
+          clusterDAO.getClusterConfigMappingEntitiesByCluster(getClusterId());
       for (ClusterConfigMappingEntity entity : mappingEntities) {
-        entity.setSelected(0);
-        clusterDAO.mergeConfigMapping(entity);
+        if (configTypes.contains(entity.getType()) && entity.isSelected() > 0) {
+          entity.setSelected(0);
+          entity = clusterDAO.mergeConfigMapping(entity);
+        }
       }
 
       for (ClusterConfigEntity configEntity : serviceConfigEntity.getClusterConfigEntities()) {
@@ -2597,12 +2599,14 @@ public class ClusterImpl implements Cluster {
   @Transactional
   void selectConfig(String type, String tag, String user) {
     Collection<ClusterConfigMappingEntity> entities =
-      clusterDAO.getLatestClusterConfigMappingsEntityByType(getClusterId(), type);
+        clusterDAO.getClusterConfigMappingEntitiesByCluster(getClusterId());
 
     //disable previous config
     for (ClusterConfigMappingEntity e : entities) {
-      e.setSelected(0);
-      clusterDAO.mergeConfigMapping(e);
+      if (e.isSelected() > 0 && e.getType().equals(type)) {
+        e.setSelected(0);
+        e = clusterDAO.mergeConfigMapping(e);
+      }
     }
 
     ClusterEntity clusterEntity = getClusterEntity();
@@ -2668,15 +2672,32 @@ public class ClusterImpl implements Cluster {
   }
 
   private List<ClusterConfigEntity> getClusterConfigEntitiesByService(String serviceName) {
+    List<ClusterConfigEntity> configEntities = new ArrayList<ClusterConfigEntity>();
+
+    //add configs from this service
     Collection<String> configTypes = serviceConfigTypes.get(serviceName);
-    return clusterDAO.getLatestClusterConfigsByTypes(getClusterId(), new ArrayList<>(configTypes));
+    for (ClusterConfigMappingEntity mappingEntity : clusterDAO.getClusterConfigMappingEntitiesByCluster(getClusterId())) {
+      if (mappingEntity.isSelected() > 0 && configTypes.contains(mappingEntity.getType())) {
+        ClusterConfigEntity configEntity =
+          clusterDAO.findConfig(getClusterId(), mappingEntity.getType(), mappingEntity.getTag());
+        if (configEntity != null) {
+          configEntities.add(configEntity);
+        } else {
+          LOG.error("Desired cluster config type={}, tag={} is not present in database," +
+            " unable to add to service config version");
+        }
+      }
+    }
+    return configEntities;
   }
 
   @Override
   public Config getDesiredConfigByType(String configType) {
-    List<ClusterConfigMappingEntity> entities = clusterDAO.getLatestClusterConfigMappingsEntityByType(getClusterId(), configType);
-    if (!entities.isEmpty()) {
-      return getConfig(configType, entities.get(0).getTag());
+    for (ClusterConfigMappingEntity e : clusterDAO.getClusterConfigMappingEntitiesByCluster(
+        getClusterId())) {
+      if (e.isSelected() > 0 && e.getType().equals(configType)) {
+        return getConfig(e.getType(), e.getTag());
+      }
     }
 
     return null;
@@ -3433,4 +3454,5 @@ public class ClusterImpl implements Cluster {
 
     m_clusterPropertyCache.clear();
   }
+
 }
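
The reverted code paths go back to scanning every cluster config mapping on each lookup. A minimal sketch of that lookup pattern (hypothetical types, not Ambari classes) makes the cost model explicit: each getDesiredConfigByType-style call walks all mapping rows and returns the first selected one of the requested type, so latency grows with the total number of config versions, which is what the reverted change had tried to avoid with a targeted DAO query.

    import java.util.Arrays;
    import java.util.List;

    public class DesiredConfigLookupSketch {

      static final class ConfigMapping {
        final String type;
        final String tag;
        final int selected;  // > 0 means this is the live config for its type
        ConfigMapping(String type, String tag, int selected) {
          this.type = type;
          this.tag = tag;
          this.selected = selected;
        }
      }

      // Linear scan over every mapping row for the cluster.
      static String desiredTagForType(List<ConfigMapping> allMappings, String configType) {
        for (ConfigMapping m : allMappings) {
          if (m.selected > 0 && m.type.equals(configType)) {
            return m.tag;
          }
        }
        return null;
      }

      public static void main(String[] args) {
        List<ConfigMapping> mappings = Arrays.asList(
            new ConfigMapping("hdfs-site", "version1", 0),
            new ConfigMapping("hdfs-site", "version2", 1),
            new ConfigMapping("core-site", "version1", 1));
        System.out.println(desiredTagForType(mappings, "hdfs-site")); // version2
      }
    }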


[24/50] ambari git commit: AMBARI-19010. Log Search external login credential let every user in (oleewere)

Posted by sw...@apache.org.
AMBARI-19010. Log Search external login credential let every user in (oleewere)

Change-Id: Iab889180bfb4916fd06240588b6cdd80493889a1


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/26868322
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/26868322
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/26868322

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 268683220382cb1f0061d1e9135cc882b74c0f97
Parents: 6f4055f
Author: oleewere <ol...@gmail.com>
Authored: Tue Nov 29 13:16:51 2016 +0100
Committer: oleewere <ol...@gmail.com>
Committed: Tue Nov 29 18:02:11 2016 +0100

----------------------------------------------------------------------
 .../ambari/logsearch/common/ExternalServerClient.java   | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/26868322/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java
index 1207373..230ca8c 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java
@@ -23,9 +23,11 @@ import javax.inject.Named;
 import javax.ws.rs.client.Invocation;
 import javax.ws.rs.client.WebTarget;
 import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
 
 import org.apache.ambari.logsearch.conf.AuthPropsConfig;
 import org.apache.ambari.logsearch.util.SSLUtil;
+import org.apache.commons.httpclient.auth.InvalidCredentialsException;
 import org.apache.log4j.Logger;
 import org.glassfish.jersey.client.JerseyClient;
 import org.glassfish.jersey.client.JerseyClientBuilder;
@@ -64,9 +66,15 @@ public class ExternalServerClient {
     WebTarget target = client.target(url);
     LOG.debug("URL: " + url);
     
-    Invocation.Builder invocationBuilder =  target.request(MediaType.APPLICATION_JSON_TYPE);
+    Invocation.Builder invocationBuilder =  target.request();
     try {
-      return invocationBuilder.get().readEntity(klass);
+      Response response = invocationBuilder.get();
+      if (response.getStatus() != Response.Status.OK.getStatusCode()
+        && response.getStatus() != Response.Status.FOUND.getStatusCode()) {
+        throw new InvalidCredentialsException(String.format("External auth failed with status code: %d, response: %s",
+          response.getStatus(), response.readEntity(String.class)));
+      }
+      return response.readEntity(klass);
     } catch (Exception e) {
       throw new Exception(e.getCause());
     } finally {
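
A hedged sketch of the validation pattern this fix introduces: any response other than 200 OK or 302 FOUND from the external authentication endpoint is treated as a failed login instead of being deserialized into a user object. The helper below uses plain ints and a generic exception; it is illustrative only, not the Log Search API.

    public class ExternalAuthStatusSketch {

      // Mirrors the status check added above: only 200 and 302 count as a
      // successful authentication response.
      static void assertAuthenticated(int httpStatus, String body) throws Exception {
        if (httpStatus != 200 && httpStatus != 302) {
          throw new Exception(String.format(
              "External auth failed with status code: %d, response: %s", httpStatus, body));
        }
      }

      public static void main(String[] args) throws Exception {
        assertAuthenticated(200, "{\"user\":\"admin\"}"); // accepted
        try {
          assertAuthenticated(401, "Unauthorized");       // rejected
        } catch (Exception expected) {
          System.out.println(expected.getMessage());
        }
      }
    }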


[48/50] ambari git commit: AMBARI-19040. Fix NPE in UpgradeCatalog250Test.testExecuteDMLUpdates (rlevas)

Posted by sw...@apache.org.
AMBARI-19040. Fix NPE in UpgradeCatalog250Test.testExecuteDMLUpdates (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7b53d070
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7b53d070
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7b53d070

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 7b53d07040fee054fa3592da69d9d7f68b3adfd9
Parents: a5ce823
Author: Robert Levas <rl...@hortonworks.com>
Authored: Wed Nov 30 20:40:12 2016 -0500
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Wed Nov 30 20:40:12 2016 -0500

----------------------------------------------------------------------
 .../org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java | 5 +++++
 1 file changed, 5 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7b53d070/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
index 4135919..ce0b387 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
@@ -211,11 +211,13 @@ public class UpgradeCatalog250Test {
   public void testExecuteDMLUpdates() throws Exception {
     Method updateAmsConfigs = UpgradeCatalog250.class.getDeclaredMethod("updateAMSConfigs");
     Method updateKafkaConfigs = UpgradeCatalog250.class.getDeclaredMethod("updateKafkaConfigs");
+    Method updateHiveLlapConfigs = UpgradeCatalog250.class.getDeclaredMethod("updateHiveLlapConfigs");
     Method addNewConfigurationsFromXml = AbstractUpgradeCatalog.class.getDeclaredMethod("addNewConfigurationsFromXml");
 
     UpgradeCatalog250 upgradeCatalog250 = createMockBuilder(UpgradeCatalog250.class)
       .addMockedMethod(updateAmsConfigs)
       .addMockedMethod(updateKafkaConfigs)
+      .addMockedMethod(updateHiveLlapConfigs)
       .addMockedMethod(addNewConfigurationsFromXml)
       .createMock();
 
@@ -229,6 +231,9 @@ public class UpgradeCatalog250Test {
     upgradeCatalog250.updateKafkaConfigs();
     expectLastCall().once();
 
+    upgradeCatalog250.updateHiveLlapConfigs();
+    expectLastCall().once();
+
     replay(upgradeCatalog250);
 
     upgradeCatalog250.executeDMLUpdates();


[06/50] ambari git commit: AMBARI-18995. HBASE service check fails if run with tty=true (aonishuk)

Posted by sw...@apache.org.
AMBARI-18995. HBASE service check fails if run with tty=true (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e62bf6ec
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e62bf6ec
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e62bf6ec

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: e62bf6ec8d1fee99c562a0d9b4d9ea34525cfaae
Parents: 5bdcc41
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Mon Nov 28 17:01:36 2016 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Mon Nov 28 17:01:36 2016 +0200

----------------------------------------------------------------------
 .../HBASE/0.96.0.2.0/package/files/hbaseSmokeVerify.sh             | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e62bf6ec/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/files/hbaseSmokeVerify.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/files/hbaseSmokeVerify.sh b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/files/hbaseSmokeVerify.sh
index 5c320c0..8b085e8 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/files/hbaseSmokeVerify.sh
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/files/hbaseSmokeVerify.sh
@@ -25,7 +25,7 @@ hbase_cmd=$3
 echo "scan 'ambarismoketest'" | $hbase_cmd --config $conf_dir shell > /tmp/hbase_chk_verify
 cat /tmp/hbase_chk_verify
 echo "Looking for $data"
-grep -q $data /tmp/hbase_chk_verify
+tr -d '\n|\t| ' < /tmp/hbase_chk_verify | grep -q $data
 if [ "$?" -ne 0 ]
 then
   exit 1


[28/50] ambari git commit: AMBARI-18792. Update some configuration properties for hive interactive for the HDP 2.6 stack - minor fix (Siddharth Seth via smohanty)

Posted by sw...@apache.org.
AMBARI-18792. Update some configuration properties for hive interactive for the HDP 2.6 stack - minor fix (Siddharth Seth via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/20735887
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/20735887
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/20735887

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 20735887018d4de6e4e5d67839a2aa30eea0f2ad
Parents: 052da57
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Tue Nov 29 15:35:20 2016 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Tue Nov 29 15:35:20 2016 -0800

----------------------------------------------------------------------
 .../java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java  | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/20735887/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
index 44ce895..52de784 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
@@ -124,7 +124,7 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
    */
   @Override
   protected void executePreDMLUpdates() throws AmbariException, SQLException {
-    updateHiveLlapConfigs();
+
   }
 
   /**
@@ -135,6 +135,7 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
     addNewConfigurationsFromXml();
     updateAMSConfigs();
     updateKafkaConfigs();
+    updateHiveLlapConfigs();
   }
 
   protected void updateHostVersionTable() throws SQLException {


[12/50] ambari git commit: AMBARI-18951. Force InnoDB usage for MySQL. (mpapirkovskyy)

Posted by sw...@apache.org.
AMBARI-18951. Force InnoDB usage for MySQL. (mpapirkovskyy)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/98e41c67
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/98e41c67
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/98e41c67

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 98e41c677b2e22e694eda27ee6217d560ac4dbe2
Parents: 911b917
Author: Myroslav Papirkovskyi <mp...@hortonworks.com>
Authored: Mon Nov 28 18:25:07 2016 +0200
Committer: Myroslav Papirkovskyi <mp...@hortonworks.com>
Committed: Mon Nov 28 18:44:23 2016 +0200

----------------------------------------------------------------------
 .../server/orm/helpers/dbms/MySqlHelper.java    | 18 ++++++++++
 .../server/upgrade/SchemaUpgradeHelper.java     | 35 +++++++++++++++++++-
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |  1 +
 3 files changed, 53 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/98e41c67/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/MySqlHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/MySqlHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/MySqlHelper.java
index 38fe062..8a83c90 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/MySqlHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/MySqlHelper.java
@@ -19,8 +19,13 @@
 package org.apache.ambari.server.orm.helpers.dbms;
 
 import org.apache.ambari.server.orm.DBAccessor;
+import org.eclipse.persistence.exceptions.ValidationException;
 import org.eclipse.persistence.platform.database.DatabasePlatform;
 
+import java.io.IOException;
+import java.io.Writer;
+import java.util.List;
+
 public class MySqlHelper extends GenericDbmsHelper {
   public MySqlHelper(DatabasePlatform databasePlatform) {
     super(databasePlatform);
@@ -75,4 +80,17 @@ public class MySqlHelper extends GenericDbmsHelper {
                                     .append("AND constraints.TABLE_NAME = \"").append(tableName).append("\"");
     return statement.toString();
   }
+
+  @Override
+  public Writer writeCreateTableStatement(Writer writer, String tableName,
+                                          List<DBAccessor.DBColumnInfo> columns,
+                                          List<String> primaryKeyColumns) {
+    Writer defaultWriter = super.writeCreateTableStatement(writer, tableName, columns, primaryKeyColumns);
+    try {
+      defaultWriter.write(" ENGINE=INNODB");
+    } catch (IOException e) {
+      throw ValidationException.fileError(e);
+    }
+    return defaultWriter;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/98e41c67/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
index f772024..1b5503e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
@@ -45,6 +45,7 @@ import com.google.inject.Inject;
 import com.google.inject.Injector;
 import com.google.inject.multibindings.Multibinder;
 import com.google.inject.persist.PersistService;
+import org.springframework.jdbc.support.JdbcUtils;
 
 public class SchemaUpgradeHelper {
   private static final Logger LOG = LoggerFactory.getLogger
@@ -330,7 +331,7 @@ public class SchemaUpgradeHelper {
   /**
    * Checks if source version meets minimal requirements for upgrade
    *
-   * @param minUpgradeVersion min allowed version for the upgrade, could be obtained via {@link SchemaUpgradeHelper.getMinimalUpgradeCatalogVersion}
+   * @param minUpgradeVersion min allowed version for the upgrade, could be obtained via {@link #getMinimalUpgradeCatalogVersion()}
    * @param sourceVersion current version of the Database, which need to be upgraded
    *
    * @return  true if upgrade is allowed or false if not
@@ -343,6 +344,29 @@ public class SchemaUpgradeHelper {
     return VersionUtils.compareVersions(sourceVersion, minUpgradeVersion) >= 0;
   }
 
+  private List<String> getMyISAMTables() throws SQLException {
+    if (!configuration.getDatabaseType().equals(Configuration.DatabaseType.MYSQL)) {
+      return Collections.emptyList();
+    }
+    List<String> myISAMTables = new ArrayList<>();
+    String query = String.format("SELECT table_name FROM information_schema.tables WHERE table_schema = '%s' " +
+      "AND engine = 'MyISAM' AND table_type = 'BASE TABLE'", configuration.getServerDBName());
+    Statement statement = null;
+    ResultSet rs = null;
+    try {
+      statement = dbAccessor.getConnection().createStatement();
+      rs = statement.executeQuery(query);
+      if (rs != null) {
+        while (rs.next()) {
+          myISAMTables.add(rs.getString("table_name"));
+        }
+      }
+    } finally {
+      JdbcUtils.closeResultSet(rs);
+      JdbcUtils.closeStatement(statement);
+    }
+    return myISAMTables;
+  }
 
   /**
    * Upgrade Ambari DB schema to the target version passed in as the only
@@ -363,6 +387,15 @@ public class SchemaUpgradeHelper {
       Injector injector = Guice.createInjector(new UpgradeHelperModule(), new AuditLoggerModule());
       SchemaUpgradeHelper schemaUpgradeHelper = injector.getInstance(SchemaUpgradeHelper.class);
 
+      //Fail if MySQL database has tables with MyISAM engine
+      List<String> myISAMTables = schemaUpgradeHelper.getMyISAMTables();
+      if (!myISAMTables.isEmpty()) {
+        String errorMessage = String.format("Unsupported MyISAM table %s detected. " +
+            "For a correct upgrade the database should be migrated to the InnoDB engine.", myISAMTables.get(0));
+        LOG.error(errorMessage);
+        throw new AmbariException(errorMessage);
+      }
+
       String targetVersion = schemaUpgradeHelper.getAmbariServerVersion();
       LOG.info("Upgrading schema to target version = " + targetVersion);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/98e41c67/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 09042b5..b01ed2f 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -26,6 +26,7 @@ delimiter ;
 
 # USE @schema;
 
+SET default_storage_engine=INNODB;
 
 CREATE TABLE stack(
   stack_id BIGINT NOT NULL,


[32/50] ambari git commit: AMBARI-18979. Optimize loading of views in cloud environments (Laszlo Puskas via magyari_sandor)

Posted by sw...@apache.org.
AMBARI-18979. Optimize loading of views in cloud environments (Laszlo Puskas via magyari_sandor)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fbff7f7e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fbff7f7e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fbff7f7e

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: fbff7f7e4d1574fcd25bd6c0384a5bbf71d98d35
Parents: 8036eb2
Author: Laszlo Puskas <lp...@hortonworks.com>
Authored: Fri Nov 25 17:35:42 2016 +0100
Committer: Sandor Magyari <sm...@hortonworks.com>
Committed: Wed Nov 30 09:09:49 2016 +0100

----------------------------------------------------------------------
 .../server/configuration/Configuration.java     |  23 +-
 .../ambari/server/events/AmbariEvent.java       |   5 +
 .../events/ClusterConfigFinishedEvent.java      |  53 +++
 .../ambari/server/topology/TopologyManager.java |  16 +-
 .../ambari/server/view/ViewExtractor.java       |   2 +-
 .../apache/ambari/server/view/ViewRegistry.java | 381 +++++++++++--------
 .../ClusterDeployWithStartOnlyTest.java         |   2 +-
 ...InstallWithoutStartOnComponentLevelTest.java |   2 +-
 .../ClusterInstallWithoutStartTest.java         |   2 +-
 .../ambari/server/view/ViewRegistryTest.java    |   2 +
 10 files changed, 314 insertions(+), 174 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/fbff7f7e/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index 4a322e0..ef35ce8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -1839,6 +1839,22 @@ public class Configuration {
       "server.stages.parallel", Boolean.TRUE);
 
   /**
+   *
+   * Property driving the view extraction.
+   * It only applies to blueprint deployments.
+   *
+   * If set to TRUE, only the system views are loaded on ambari-server startup; non-system views are extracted once a cluster
+   * creation request is received and the cluster configuration has been applied successfully.
+   *
+   * It is advised to use this property only when ambari-server startup time is critical (e.g. cloud environments).
+   *
+   * By default this is FALSE so all views are extracted and deployed at server startup.
+   */
+  @Markdown(description = "Drives view extraction in case of blueprint deployments; non-system views are deployed when cluster configuration is successful")
+  public static final ConfigurationProperty<Boolean> VIEW_EXTRACT_AFTER_CLUSTER_CONFIG =  new ConfigurationProperty<>("view.extract-after-cluster-config", Boolean.FALSE);
+
+
+  /**
    * In case this is set to DEPENDENCY_ORDERED one stage is created for each request and command dependencies are
    * handled directly by ActionScheduler. In case of STAGE (which is the default) one or more stages are
    * created depending on dependencies.
@@ -4286,6 +4302,11 @@ public class Configuration {
     return Integer.parseInt(getProperty(VIEW_REQUEST_THREADPOOL_MAX_SIZE));
   }
 
+  public Boolean extractViewsAfterClusterConfig() {
+    return Boolean.parseBoolean(getProperty(VIEW_EXTRACT_AFTER_CLUSTER_CONFIG));
+  }
+
+
   /**
    * Get the time, in ms, that a request to a view will wait for an available
    * thread to handle the request before returning an error.
@@ -5605,7 +5626,7 @@ public class Configuration {
     String acceptors = getProperty(SRVR_API_ACCEPTOR_THREAD_COUNT);
     return StringUtils.isEmpty(acceptors) ? null : Integer.parseInt(acceptors);
   }
- 
+
   public String getPamConfigurationFile() {
     return getProperty(PAM_CONFIGURATION_FILE);
   }
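
The new property follows the usual typed-default pattern: a key plus a default value, read back through getProperty and parsed to a boolean. A minimal standalone analogue (not Ambari's actual ConfigurationProperty class) looks roughly like this:

import java.util.Properties;

public class TypedPropertySketch {

  // Minimal analogue of a typed configuration property with a default value.
  public static final class ConfigProperty<T> {
    final String key;
    final T defaultValue;
    ConfigProperty(String key, T defaultValue) {
      this.key = key;
      this.defaultValue = defaultValue;
    }
  }

  static final ConfigProperty<Boolean> EXTRACT_AFTER_CLUSTER_CONFIG =
      new ConfigProperty<>("view.extract-after-cluster-config", Boolean.FALSE);

  private final Properties properties = new Properties();

  public boolean extractViewsAfterClusterConfig() {
    String raw = properties.getProperty(EXTRACT_AFTER_CLUSTER_CONFIG.key,
        String.valueOf(EXTRACT_AFTER_CLUSTER_CONFIG.defaultValue));
    return Boolean.parseBoolean(raw);
  }
}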

http://git-wip-us.apache.org/repos/asf/ambari/blob/fbff7f7e/ambari-server/src/main/java/org/apache/ambari/server/events/AmbariEvent.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/AmbariEvent.java b/ambari-server/src/main/java/org/apache/ambari/server/events/AmbariEvent.java
index 7ec5972..868ce3f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/events/AmbariEvent.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/AmbariEvent.java
@@ -128,6 +128,11 @@ public abstract class AmbariEvent {
     CLUSTER_CONFIG_CHANGED,
 
     /**
+     * Cluster configuration finished.
+     */
+    CLUSTER_CONFIG_FINISHED,
+
+    /**
      * Metrics Collector force refresh needed.
      */
     METRICS_COLLECTOR_HOST_DOWN,

http://git-wip-us.apache.org/repos/asf/ambari/blob/fbff7f7e/ambari-server/src/main/java/org/apache/ambari/server/events/ClusterConfigFinishedEvent.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/ClusterConfigFinishedEvent.java b/ambari-server/src/main/java/org/apache/ambari/server/events/ClusterConfigFinishedEvent.java
new file mode 100644
index 0000000..f0cac72
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/ClusterConfigFinishedEvent.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.events;
+
+/**
+ * The {@link ClusterConfigFinishedEvent} class is fired when a
+ * cluster configuration is successfully updated.
+ */
+public class ClusterConfigFinishedEvent extends AmbariEvent {
+  private final String clusterName;
+
+
+  public ClusterConfigFinishedEvent(String clusterName) {
+    super(AmbariEventType.CLUSTER_CONFIG_FINISHED);
+    this.clusterName = clusterName;
+  }
+
+  /**
+   * Get the cluster name.
+   *
+   * @return the cluster name
+   */
+  public String getClusterName() {
+    return clusterName;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public String toString() {
+    StringBuilder buffer = new StringBuilder("ClusterConfigFinishedEvent{");
+    buffer.append("clusterName=").append(getClusterName());
+    buffer.append("}");
+    return buffer.toString();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/fbff7f7e/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
index 341633e..d6a4bdd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
@@ -58,6 +58,7 @@ import org.apache.ambari.server.controller.spi.ResourceProvider;
 import org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
 import org.apache.ambari.server.events.AmbariEvent;
+import org.apache.ambari.server.events.ClusterConfigFinishedEvent;
 import org.apache.ambari.server.events.HostRemovedEvent;
 import org.apache.ambari.server.events.RequestFinishedEvent;
 import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
@@ -114,6 +115,9 @@ public class TopologyManager {
   @Inject
   private SecurityConfigurationFactory securityConfigurationFactory;
 
+  @Inject
+  private AmbariEventPublisher ambariEventPublisher;
+
   /**
    * A boolean not cached thread-local (volatile) to prevent double-checked
    * locking on the synchronized keyword.
@@ -145,8 +149,9 @@ public class TopologyManager {
       : executor;
   }
 
+  // executed by the IoC framework after creating the object (guice)
   @Inject
-  public void setEventPublisher(AmbariEventPublisher ambariEventPublisher) {
+  private void register() {
     ambariEventPublisher.register(this);
   }
 
@@ -280,6 +285,13 @@ public class TopologyManager {
 
     addClusterConfigRequest(topology, new ClusterConfigurationRequest(
       ambariContext, topology, true, stackAdvisorBlueprintProcessor, configureSecurity));
+    executor.submit(new Callable<Boolean>() {
+      @Override
+      public Boolean call() throws Exception {
+        ambariEventPublisher.publish(new ClusterConfigFinishedEvent(clusterName));
+        return Boolean.TRUE;
+      }
+    });
     LogicalRequest logicalRequest = processRequest(persistedRequest, topology, provisionId);
 
     //todo: this should be invoked as part of a generic lifecycle event which could possibly
@@ -352,7 +364,7 @@ public class TopologyManager {
 
     Map<String, String> requestInfoProps = new HashMap<>();
     requestInfoProps.put(org.apache.ambari.server.controller.spi.Request.REQUEST_INFO_BODY_PROPERTY,
-        "{\"" + ArtifactResourceProvider.ARTIFACT_DATA_PROPERTY + "\": " + descriptor + "}");
+      "{\"" + ArtifactResourceProvider.ARTIFACT_DATA_PROPERTY + "\": " + descriptor + "}");
 
     org.apache.ambari.server.controller.spi.Request request = new RequestImpl(Collections.<String>emptySet(),
         Collections.singleton(properties), requestInfoProps, null);
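
Taken together, TopologyManager submits a task that publishes a ClusterConfigFinishedEvent once the cluster configuration request has been queued, and ViewRegistry (shown further below) subscribes to it and extracts the non-system views at that point. A simplified analogue of that publish/subscribe wiring using a plain Guava EventBus; the real code goes through AmbariEventPublisher, and the classes here are stand-ins:

import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;

public class ConfigFinishedEventSketch {

  // Stand-in for ClusterConfigFinishedEvent.
  public static class ClusterConfigFinished {
    final String clusterName;
    ClusterConfigFinished(String clusterName) {
      this.clusterName = clusterName;
    }
  }

  // Stand-in for the ViewRegistry subscriber.
  public static class Registry {
    @Subscribe
    public void onClusterConfigFinished(ClusterConfigFinished event) {
      System.out.println("Extracting non-system views for cluster " + event.clusterName);
    }
  }

  public static void main(String[] args) {
    EventBus bus = new EventBus();
    bus.register(new Registry());                 // ViewRegistry registers itself as a subscriber
    bus.post(new ClusterConfigFinished("c1"));    // TopologyManager publishes after configuration
  }
}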

http://git-wip-us.apache.org/repos/asf/ambari/blob/fbff7f7e/ambari-server/src/main/java/org/apache/ambari/server/view/ViewExtractor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewExtractor.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewExtractor.java
index 3425691..0dabf90 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewExtractor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewExtractor.java
@@ -74,7 +74,7 @@ public class ViewExtractor {
 
     try {
       // Remove directory if jar was updated since last extracting
-      if (archiveDir.exists() && viewArchive.lastModified() > archiveDir.lastModified()) {
+      if (archiveDir.exists() && viewArchive != null && viewArchive.lastModified() > archiveDir.lastModified()) {
         FileUtils.deleteDirectory(archiveDir);
       }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/fbff7f7e/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
index 7f58485..ab83696 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
@@ -57,6 +57,7 @@ import org.apache.ambari.server.controller.AmbariServer;
 import org.apache.ambari.server.controller.AmbariSessionManager;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.ResourceProvider;
+import org.apache.ambari.server.events.ClusterConfigFinishedEvent;
 import org.apache.ambari.server.events.ServiceInstalledEvent;
 import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
 import org.apache.ambari.server.orm.dao.MemberDAO;
@@ -142,7 +143,7 @@ public class ViewRegistry {
   private static final String EXTRACT_COMMAND = "extract";
   private static final String ALL_VIEWS_REG_EXP = ".*";
   protected static final int DEFAULT_REQUEST_CONNECT_TIMEOUT = 5000;
-  protected static final int DEFAULT_REQUEST_READ_TIMEOUT    = 10000;
+  protected static final int DEFAULT_REQUEST_READ_TIMEOUT = 10000;
   private static final String VIEW_AMBARI_VERSION_REGEXP = "^((\\d+\\.)?)*(\\*|\\d+)$";
   private static final String VIEW_LOG_FILE = "view.log4j.properties";
   private static final String AMBARI_LOG_FILE = "log4j.properties";
@@ -313,7 +314,7 @@ public class ViewRegistry {
   @Inject
   RemoteAmbariClusterDAO remoteAmbariClusterDAO;
 
- // ----- Constructors -----------------------------------------------------
+  // ----- Constructors -----------------------------------------------------
 
   /**
    * Create the view registry.
@@ -329,7 +330,7 @@ public class ViewRegistry {
   /**
    * Registry main method.
    *
-   * @param args  the command line arguments
+   * @param args the command line arguments
    */
   public static void main(String[] args) {
 
@@ -337,8 +338,8 @@ public class ViewRegistry {
 
       if (args[0].equals(EXTRACT_COMMAND)) {
 
-        String     archivePath = args[1];
-        ViewModule viewModule  = new ViewModule();
+        String archivePath = args[1];
+        ViewModule viewModule = new ViewModule();
 
         try {
           if (extractViewArchive(archivePath, viewModule, true)) {
@@ -366,9 +367,8 @@ public class ViewRegistry {
   /**
    * Get a view definition for the given name.
    *
-   * @param viewName  the view name
-   * @param version   the version
-   *
+   * @param viewName the view name
+   * @param version  the version
    * @return the view definition for the given name
    */
   public ViewEntity getDefinition(String viewName, String version) {
@@ -378,8 +378,7 @@ public class ViewRegistry {
   /**
    * Get the view definition for the given resource type.
    *
-   * @param resourceTypeEntity  the resource type
-   *
+   * @param resourceTypeEntity the resource type
    * @return the view definition for the given resource type or null
    */
   public ViewEntity getDefinition(ResourceTypeEntity resourceTypeEntity) {
@@ -397,7 +396,7 @@ public class ViewRegistry {
   /**
    * Add a view definition to the registry.
    *
-   * @param definition  the definition
+   * @param definition the definition
    */
   public void addDefinition(ViewEntity definition) {
     viewDefinitions.put(definition.getName(), definition);
@@ -406,8 +405,7 @@ public class ViewRegistry {
   /**
    * Get the collection of view instances for the given view definition.
    *
-   * @param definition  the view definition
-   *
+   * @param definition the view definition
    * @return the collection of view instances for the view definition
    */
   public Collection<ViewInstanceEntity> getInstanceDefinitions(ViewEntity definition) {
@@ -423,10 +421,9 @@ public class ViewRegistry {
   /**
    * Get the instance definition for the given view name and instance name.
    *
-   * @param viewName      the view name
-   * @param version       the version
-   * @param instanceName  the instance name
-   *
+   * @param viewName     the view name
+   * @param version      the version
+   * @param instanceName the instance name
    * @return the view instance definition for the given view and instance name
    */
   public ViewInstanceEntity getInstanceDefinition(String viewName, String version, String instanceName) {
@@ -439,8 +436,8 @@ public class ViewRegistry {
   /**
    * Add an instance definition for the given view definition.
    *
-   * @param definition          the owning view definition
-   * @param instanceDefinition  the instance definition
+   * @param definition         the owning view definition
+   * @param instanceDefinition the instance definition
    */
   public void addInstanceDefinition(ViewEntity definition, ViewInstanceEntity instanceDefinition) {
     Map<String, ViewInstanceEntity> instanceDefinitions = viewInstanceDefinitions.get(definition);
@@ -459,8 +456,8 @@ public class ViewRegistry {
   /**
    * Remove an instance definition for the given view definition.
    *
-   * @param definition    the owning view definition
-   * @param instanceName  the instance name
+   * @param definition   the owning view definition
+   * @param instanceName the instance name
    */
   public void removeInstanceDefinition(ViewEntity definition, String instanceName) {
     Map<String, ViewInstanceEntity> instanceDefinitions = viewInstanceDefinitions.get(definition);
@@ -480,7 +477,7 @@ public class ViewRegistry {
   /**
    * Init the singleton instance.
    *
-   * @param singleton  the view registry
+   * @param singleton the view registry
    */
   public static void initInstance(ViewRegistry singleton) {
     ViewRegistry.singleton = singleton;
@@ -489,7 +486,7 @@ public class ViewRegistry {
   /**
    * Get the view registry singleton.
    *
-   * @return  the view registry
+   * @return the view registry
    */
   public static ViewRegistry getInstance() {
     return singleton;
@@ -498,9 +495,8 @@ public class ViewRegistry {
   /**
    * Get the sub-resource definitions for the given view name.
    *
-   * @param viewName  the instance name
-   * @param version   the version
-   *
+   * @param viewName the instance name
+   * @param version  the version
    * @return the set of sub-resource definitions
    */
   public Set<SubResourceDefinition> getSubResourceDefinitions(
@@ -511,11 +507,14 @@ public class ViewRegistry {
 
     return subResourceDefinitionsMap.get(viewName);
   }
+
   /**
    * Read all view archives.
    */
   public void readViewArchives() {
-    readViewArchives(false, false, ALL_VIEWS_REG_EXP);
+    boolean systemViewsOnly = configuration.extractViewsAfterClusterConfig() && clustersProvider.get().getClusters().isEmpty();
+    LOG.info("Triggering loading of [{}] views", systemViewsOnly ? "SYSTEM" : "ALL");
+    readViewArchives(systemViewsOnly, false, ALL_VIEWS_REG_EXP);
   }
 
   /**
@@ -530,8 +529,7 @@ public class ViewRegistry {
   /**
    * Determine whether or not the given view instance exists.
    *
-   * @param instanceEntity  the view instance entity
-   *
+   * @param instanceEntity the view instance entity
    * @return true if the given view instance exists; false otherwise
    */
   public boolean instanceExists(ViewInstanceEntity instanceEntity) {
@@ -545,12 +543,11 @@ public class ViewRegistry {
   /**
    * Install the given view instance with its associated view.
    *
-   * @param instanceEntity  the view instance entity
-   *
-   * @throws ValidationException       if the given instance fails the validation checks
-   * @throws IllegalArgumentException  if the view associated with the given instance
-   *                                   does not exist
-   * @throws SystemException           if the instance can not be installed
+   * @param instanceEntity the view instance entity
+   * @throws ValidationException      if the given instance fails the validation checks
+   * @throws IllegalArgumentException if the view associated with the given instance
+   *                                  does not exist
+   * @throws SystemException          if the instance can not be installed
    */
   public void installViewInstance(ViewInstanceEntity instanceEntity)
       throws ValidationException, IllegalArgumentException, SystemException {
@@ -558,8 +555,8 @@ public class ViewRegistry {
 
     if (viewEntity != null) {
       String instanceName = instanceEntity.getName();
-      String viewName     = viewEntity.getCommonName();
-      String version      = viewEntity.getVersion();
+      String viewName = viewEntity.getCommonName();
+      String version = viewEntity.getVersion();
 
       if (getInstanceDefinition(viewName, version, instanceName) == null) {
         if (LOG.isDebugEnabled()) {
@@ -602,10 +599,9 @@ public class ViewRegistry {
   /**
    * Update a view instance for the view with the given view name.
    *
-   * @param instanceEntity  the view instance entity
-   *
-   * @throws ValidationException   if the given instance fails the validation checks
-   * @throws SystemException       if the instance can not be updated
+   * @param instanceEntity the view instance entity
+   * @throws ValidationException if the given instance fails the validation checks
+   * @throws SystemException     if the instance can not be updated
    */
   public void updateViewInstance(ViewInstanceEntity instanceEntity)
       throws ValidationException, SystemException {
@@ -624,9 +620,9 @@ public class ViewRegistry {
    *
    * @param instanceEntity
    */
-  public void updateView(ViewInstanceEntity instanceEntity){
+  public void updateView(ViewInstanceEntity instanceEntity) {
     ViewEntity viewEntity = getDefinition(instanceEntity.getViewName());
-    if(null != viewEntity && null != viewEntity.getView()){
+    if (null != viewEntity && null != viewEntity.getView()) {
       viewEntity.getView().onUpdate(instanceEntity);
     }
   }
@@ -634,9 +630,8 @@ public class ViewRegistry {
   /**
    * Get a view instance entity for the given view name and instance name.
    *
-   * @param viewName      the view name
-   * @param instanceName  the instance name
-   *
+   * @param viewName     the view name
+   * @param instanceName the instance name
    * @return a view instance entity for the given view name and instance name.
    */
   public ViewInstanceEntity getViewInstanceEntity(String viewName, String instanceName) {
@@ -646,7 +641,7 @@ public class ViewRegistry {
   /**
    * Uninstall a view instance for the view with the given view name.
    *
-   * @param instanceEntity  the view instance entity
+   * @param instanceEntity the view instance entity
    * @throws IllegalStateException if the given instance is not in a valid state
    */
   @Transactional
@@ -655,8 +650,8 @@ public class ViewRegistry {
 
     if (viewEntity != null) {
       String instanceName = instanceEntity.getName();
-      String viewName     = viewEntity.getCommonName();
-      String version      = viewEntity.getVersion();
+      String viewName = viewEntity.getCommonName();
+      String version = viewEntity.getVersion();
 
       if (getInstanceDefinition(viewName, version, instanceName) != null) {
         if (instanceEntity.isXmlDriven()) {
@@ -664,7 +659,7 @@ public class ViewRegistry {
         }
         if (LOG.isDebugEnabled()) {
           LOG.debug("Deleting view instance " + viewName + "/" +
-              version + "/" +instanceName);
+              version + "/" + instanceName);
         }
         List<PrivilegeEntity> instancePrivileges = privilegeDAO.findByResourceId(instanceEntity.getResource().getId());
         for (PrivilegeEntity privilegeEntity : instancePrivileges) {
@@ -683,8 +678,8 @@ public class ViewRegistry {
   /**
    * Remove the data entry keyed by the given key from the given instance entity.
    *
-   * @param instanceEntity  the instance entity
-   * @param key             the data key
+   * @param instanceEntity the instance entity
+   * @param key            the data key
    */
   @Transactional
   public void removeInstanceData(ViewInstanceEntity instanceEntity, String key) {
@@ -699,17 +694,17 @@ public class ViewRegistry {
   /**
    * Copy all privileges from one view instance to another
    *
-   * @param sourceInstanceEntity  the source instance entity
-   * @param targetInstanceEntity  the target instance entity
+   * @param sourceInstanceEntity the source instance entity
+   * @param targetInstanceEntity the target instance entity
    */
   @Transactional
   public void copyPrivileges(ViewInstanceEntity sourceInstanceEntity,
                              ViewInstanceEntity targetInstanceEntity) {
     LOG.debug("Copy all privileges from " + sourceInstanceEntity.getName() + " to " +
-              targetInstanceEntity.getName());
+        targetInstanceEntity.getName());
     List<PrivilegeEntity> targetInstancePrivileges = privilegeDAO.findByResourceId(targetInstanceEntity.getResource().getId());
     if (targetInstancePrivileges.size() > 0) {
-      LOG.warn("Target instance {} already has privileges assigned, these will not be deleted. Manual clean up may be needed",targetInstanceEntity.getName());
+      LOG.warn("Target instance {} already has privileges assigned, these will not be deleted. Manual clean up may be needed", targetInstanceEntity.getName());
     }
 
     List<PrivilegeEntity> sourceInstancePrivileges = privilegeDAO.findByResourceId(sourceInstanceEntity.getResource().getId());
@@ -721,9 +716,9 @@ public class ViewRegistry {
       try {
         privilegeDAO.create(targetPrivilege);
         targetPrivilege.getPrincipal().getPrivileges().add(sourcePrivilege);
-      } catch (Exception e){
-        LOG.warn("Could not migrate privilege {} ",targetPrivilege);
-        LOG.error("Caught exception",e);
+      } catch (Exception e) {
+        LOG.warn("Could not migrate privilege {} ", targetPrivilege);
+        LOG.error("Caught exception", e);
       }
 
     }
@@ -732,7 +727,7 @@ public class ViewRegistry {
   /**
    * Notify any registered listeners of the given event.
    *
-   * @param event  the event
+   * @param event the event
    */
   public void fireEvent(Event event) {
 
@@ -745,9 +740,9 @@ public class ViewRegistry {
   /**
    * Register the given listener to listen for events from the view identified by the given name and version.
    *
-   * @param listener     the listener
-   * @param viewName     the view name
-   * @param viewVersion  the view version; null indicates all versions
+   * @param listener    the listener
+   * @param viewName    the view name
+   * @param viewVersion the view version; null indicates all versions
    */
   public synchronized void registerListener(Listener listener, String viewName, String viewVersion) {
 
@@ -766,9 +761,9 @@ public class ViewRegistry {
   /**
    * Un-register the given listener from the view identified by the given name and version.
    *
-   * @param listener     the listener
-   * @param viewName     the view name
-   * @param viewVersion  the view version; null indicates all versions
+   * @param listener    the listener
+   * @param viewName    the view name
+   * @param viewVersion the view version; null indicates all versions
    */
   public synchronized void unregisterListener(Listener listener, String viewName, String viewVersion) {
 
@@ -786,12 +781,11 @@ public class ViewRegistry {
    * is permitted for the given user on the view instance identified by
    * the given resource.
    *
-   * @param permissionEntity  the permission entity
-   * @param resourceEntity    the resource entity
-   * @param userName          the user name
-   *
+   * @param permissionEntity the permission entity
+   * @param resourceEntity   the resource entity
+   * @param userName         the user name
    * @return true if the access specified by the given permission
-   *         is permitted for the given user.
+   * is permitted for the given user.
    */
   public boolean hasPermission(PermissionEntity permissionEntity, ResourceEntity resourceEntity, String userName) {
 
@@ -823,11 +817,10 @@ public class ViewRegistry {
    * by the given instance name should be allowed based on the permissions
    * granted to the current user.
    *
-   * @param viewName      the view name
-   * @param version       the view version
-   * @param instanceName  the name of the view instance resource
-   * @param readOnly      indicate whether or not this is for a read only operation
-   *
+   * @param viewName     the view name
+   * @param version      the view version
+   * @param instanceName the name of the view instance resource
+   * @param readOnly     indicate whether or not this is for a read only operation
    * @return true if the access to the view instance is allowed
    */
   public boolean checkPermission(String viewName, String version, String instanceName, boolean readOnly) {
@@ -842,9 +835,8 @@ public class ViewRegistry {
    * Determine whether or not access to the given view instance should be allowed based
    * on the permissions granted to the current user.
    *
-   * @param instanceEntity  the view instance entity
-   * @param readOnly        indicate whether or not this is for a read only operation
-   *
+   * @param instanceEntity the view instance entity
+   * @param readOnly       indicate whether or not this is for a read only operation
    * @return true if the access to the view instance is allowed
    */
   public boolean checkPermission(ViewInstanceEntity instanceEntity, boolean readOnly) {
@@ -867,8 +859,7 @@ public class ViewRegistry {
    * Determine whether or not the given view definition resource should be included
    * based on the permissions granted to the current user.
    *
-   * @param definitionEntity  the view definition entity
-   *
+   * @param definitionEntity the view definition entity
    * @return true if the view instance should be included based on the permissions of the current user
    */
   public boolean includeDefinition(ViewEntity definitionEntity) {
@@ -877,8 +868,8 @@ public class ViewRegistry {
       return true;
     }
 
-    for (ViewInstanceEntity instanceEntity: definitionEntity.getInstances()) {
-      if (checkPermission(instanceEntity, true) ) {
+    for (ViewInstanceEntity instanceEntity : definitionEntity.getInstances()) {
+      if (checkPermission(instanceEntity, true)) {
         return true;
       }
     }
@@ -888,11 +879,10 @@ public class ViewRegistry {
   /**
    * Set the properties of the given view instance from the given property set.
    *
-   * @param instanceEntity  the view instance entity
-   * @param properties      the view instance properties
-   * @param viewConfig      the view configuration
-   * @param classLoader     the class loader for the view
-   *
+   * @param instanceEntity the view instance entity
+   * @param properties     the view instance properties
+   * @param viewConfig     the view configuration
+   * @param classLoader    the class loader for the view
    * @throws SystemException if the view instance properties can not be set
    */
   public void setViewInstanceProperties(ViewInstanceEntity instanceEntity, Map<String, String> properties,
@@ -905,7 +895,7 @@ public class ViewRegistry {
         parameterConfigMap.put(paramConfig.getName(), paramConfig);
       }
       for (Map.Entry<String, String> entry : properties.entrySet()) {
-        String name  = entry.getKey();
+        String name = entry.getKey();
         String value = entry.getValue();
 
         ParameterConfig parameterConfig = parameterConfigMap.get(name);
@@ -923,8 +913,7 @@ public class ViewRegistry {
   /**
    * Get the cluster associated with the given view instance.
    *
-   * @param viewInstance  the view instance
-   *
+   * @param viewInstance the view instance
    * @return the cluster
    */
   public Cluster getCluster(ViewInstanceDefinition viewInstance) {
@@ -959,26 +948,26 @@ public class ViewRegistry {
    * </p>
    * Used for view instance auto creation.
    *
-   * @param event  the service installed event
+   * @param event the service installed event
    */
   @Subscribe
   @AllowConcurrentEvents
   public void onAmbariEvent(ServiceInstalledEvent event) {
 
-    Clusters clusters  = clustersProvider.get();
-    Long     clusterId = event.getClusterId();
+    Clusters clusters = clustersProvider.get();
+    Long clusterId = event.getClusterId();
 
     try {
       org.apache.ambari.server.state.Cluster cluster = clusters.getClusterById(clusterId);
       String clusterName = cluster.getClusterName();
 
-      StackId     stackId       = cluster.getCurrentStackVersion();
-      Set<String> serviceNames  = cluster.getServices().keySet();
+      StackId stackId = cluster.getCurrentStackVersion();
+      Set<String> serviceNames = cluster.getServices().keySet();
 
       for (ViewEntity viewEntity : getDefinitions()) {
 
-        String             viewName   = viewEntity.getName();
-        ViewConfig         viewConfig = viewEntity.getConfiguration();
+        String viewName = viewEntity.getName();
+        ViewConfig viewConfig = viewEntity.getConfiguration();
         AutoInstanceConfig autoConfig = viewConfig.getAutoInstance();
 
         try {
@@ -999,6 +988,15 @@ public class ViewRegistry {
     }
   }
 
+  @Subscribe
+  public void onClusterConfigFinishedEvent(ClusterConfigFinishedEvent event) {
+    if (configuration.extractViewsAfterClusterConfig()) {
+      LOG.info("Trigger extracting NON-SYSTEM views; cluster [{}] ...", event.getClusterName());
+      readNonSystemViewViewArchives();
+      LOG.info("Trigger extracting NON-SYSTEM views; cluster [{}] DONE.", event.getClusterName());
+    }
+  }
+
 
   // ----- helper methods ----------------------------------------------------
 
@@ -1006,11 +1004,10 @@ public class ViewRegistry {
    * Determine whether a new view instance should be automatically created and associated with
    * a cluster based on the given configuration and cluster state.
    *
-   * @param autoConfig    the view instance auto creation configuration
-   * @param stackId       the stack id of the cluster
-   * @param serviceName   the name of the service added which triggered this check
-   * @param serviceNames  the set of service names of the cluster
-   *
+   * @param autoConfig   the view instance auto creation configuration
+   * @param stackId      the stack id of the cluster
+   * @param serviceName  the name of the service added which triggered this check
+   * @param serviceNames the set of service names of the cluster
    * @return true if a new view instance should be created
    */
   private boolean checkAutoInstanceConfig(AutoInstanceConfig autoConfig, StackId stackId,
@@ -1029,7 +1026,7 @@ public class ViewRegistry {
 
           if (id.getStackName().equals(stackId.getStackName())) {
 
-            String stackVersion       = stackId.getStackVersion();
+            String stackVersion = stackId.getStackVersion();
             String configStackVersion = id.getStackVersion();
 
             // make sure that the configured stack version equals the cluster stack version (account for *)
@@ -1038,7 +1035,7 @@ public class ViewRegistry {
             int index = configStackVersion.indexOf('*');
             if (index == -1) {
               compVal = VersionUtils.compareVersions(configStackVersion, stackVersion);
-            } else  if (index > 0) {
+            } else if (index > 0) {
               String[] parts = configStackVersion.substring(0, index).split("\\.");
               compVal = VersionUtils.compareVersions(configStackVersion, stackVersion, parts.length);
             }
@@ -1090,7 +1087,7 @@ public class ViewRegistry {
     String viewName = viewDefinition.getName();
 
     for (ParameterConfig parameterConfiguration : parameterConfigurations) {
-      ViewParameterEntity viewParameterEntity =  new ViewParameterEntity();
+      ViewParameterEntity viewParameterEntity = new ViewParameterEntity();
 
       viewParameterEntity.setViewName(viewName);
       viewParameterEntity.setName(parameterConfiguration.getName());
@@ -1144,8 +1141,8 @@ public class ViewRegistry {
       } else {
         ResourceInstanceFactoryImpl.addResourceDefinition(type, resourceDefinition);
 
-        Class<?> clazz      = resourceConfiguration.getResourceClass(cl);
-        String   idProperty = resourceConfiguration.getIdProperty();
+        Class<?> clazz = resourceConfiguration.getResourceClass(cl);
+        String idProperty = resourceConfiguration.getIdProperty();
 
         ViewSubResourceProvider provider = new ViewSubResourceProvider(type, clazz, idProperty, viewDefinition);
         viewDefinition.addResourceProvider(type, provider);
@@ -1165,7 +1162,7 @@ public class ViewRegistry {
 
     Collection<PermissionEntity> permissions = new HashSet<PermissionEntity>();
     for (PermissionConfig permissionConfiguration : permissionConfigurations) {
-      PermissionEntity permissionEntity =  new PermissionEntity();
+      PermissionEntity permissionEntity = new PermissionEntity();
 
       permissionEntity.setPermissionName(permissionConfiguration.getName());
       permissionEntity.setResourceType(resourceTypeEntity);
@@ -1226,7 +1223,7 @@ public class ViewRegistry {
 
   // bind a view instance definition to the given view definition
   protected void bindViewInstance(ViewEntity viewDefinition,
-                                   ViewInstanceEntity viewInstanceDefinition)
+                                  ViewInstanceEntity viewInstanceDefinition)
       throws ClassNotFoundException {
     viewInstanceDefinition.setViewEntity(viewDefinition);
 
@@ -1240,7 +1237,7 @@ public class ViewRegistry {
     Collection<ViewSubResourceDefinition> resourceDefinitions = viewDefinition.getResourceDefinitions().values();
     for (ViewSubResourceDefinition resourceDefinition : resourceDefinitions) {
 
-      Resource.Type  type           = resourceDefinition.getType();
+      Resource.Type type = resourceDefinition.getType();
       ResourceConfig resourceConfig = resourceDefinition.getResourceConfiguration();
 
       ViewResourceHandler viewResourceService = new ViewSubResourceService(type, viewInstanceDefinition);
@@ -1252,7 +1249,7 @@ public class ViewRegistry {
       if (resourceConfig.isExternal()) {
         externalSubResourceService.addResourceService(resourceConfig.getName(), service);
       } else {
-        viewInstanceDefinition.addService(viewDefinition.getResourceDefinition(type).getPluralName(),service);
+        viewInstanceDefinition.addService(viewDefinition.getResourceDefinition(type).getPluralName(), service);
         viewInstanceDefinition.addResourceProvider(type,
             getProvider(resourceConfig.getProviderClass(cl), viewInstanceContext));
       }
@@ -1263,7 +1260,7 @@ public class ViewRegistry {
   // Set the entities defined in the view persistence element for the given view instance
   private static void setPersistenceEntities(ViewInstanceEntity viewInstanceDefinition) {
     ViewEntity viewDefinition = viewInstanceDefinition.getViewEntity();
-    ViewConfig viewConfig     = viewDefinition.getConfiguration();
+    ViewConfig viewConfig = viewDefinition.getConfiguration();
 
     Collection<ViewEntityEntity> entities = new HashSet<ViewEntityEntity>();
 
@@ -1332,7 +1329,7 @@ public class ViewRegistry {
 
   // get the given view validator class from the given class loader; inject a context
   private static Validator getValidator(Class<? extends Validator> clazz,
-                              final ViewContext viewContext) {
+                                        final ViewContext viewContext) {
     Injector viewInstanceInjector = Guice.createInjector(new AbstractModule() {
       @Override
       protected void configure() {
@@ -1371,17 +1368,16 @@ public class ViewRegistry {
    * Sync given view with data in DB. Ensures that view data in DB is updated,
    * all instances changes from xml config are reflected to DB
    *
-   * @param view                 view config from xml
-   * @param instanceDefinitions  view instances from xml
-   *
+   * @param view                view config from xml
+   * @param instanceDefinitions view instances from xml
    * @throws Exception if the view can not be synced
    */
   private void syncView(ViewEntity view,
                         Set<ViewInstanceEntity> instanceDefinitions)
       throws Exception {
 
-    String      viewName      = view.getName();
-    ViewEntity  persistedView = viewDAO.findByName(viewName);
+    String viewName = view.getName();
+    ViewEntity persistedView = viewDAO.findByName(viewName);
 
     if (LOG.isDebugEnabled()) {
       LOG.debug("Syncing view " + viewName + ".");
@@ -1396,7 +1392,7 @@ public class ViewRegistry {
       // create an admin resource type to represent this view
       ResourceTypeEntity resourceType = resourceTypeDAO.merge(view.getResourceType());
 
-      for( ViewInstanceEntity instance : view.getInstances()) {
+      for (ViewInstanceEntity instance : view.getInstances()) {
         instance.setResource(createViewInstanceResource(resourceType));
       }
       // ... merge the view
@@ -1408,10 +1404,10 @@ public class ViewRegistry {
     view.setPermissions(persistedView.getPermissions());
 
     // make sure that each instance of the view in the db is reflected in the given view
-    for (ViewInstanceEntity persistedInstance : persistedView.getInstances()){
+    for (ViewInstanceEntity persistedInstance : persistedView.getInstances()) {
 
-      String             instanceName = persistedInstance.getName();
-      ViewInstanceEntity instance     = view.getInstanceDefinition(instanceName);
+      String instanceName = persistedInstance.getName();
+      ViewInstanceEntity instance = view.getInstanceDefinition(instanceName);
 
       // if the persisted instance is not in the view ...
       if (instance == null) {
@@ -1437,8 +1433,8 @@ public class ViewRegistry {
 
   // sync the given view instance entity to the matching view instance entity in the registry
   private void syncViewInstance(ViewInstanceEntity instanceEntity) {
-    String viewName     = instanceEntity.getViewDefinition().getViewName();
-    String version      = instanceEntity.getViewDefinition().getVersion();
+    String viewName = instanceEntity.getViewDefinition().getViewName();
+    String version = instanceEntity.getViewDefinition().getVersion();
     String instanceName = instanceEntity.getInstanceName();
 
     ViewInstanceEntity registryEntry = getInstanceDefinition(viewName, version, instanceName);
@@ -1521,32 +1517,85 @@ public class ViewRegistry {
 
   /**
    * Extract a view archive at the specified path
+   *
    * @param path
    */
   public void readViewArchive(Path path) {
 
     File viewDir = configuration.getViewsDir();
     String extractedArchivesPath = viewDir.getAbsolutePath() +
-            File.separator + EXTRACTED_ARCHIVES_DIR;
+        File.separator + EXTRACTED_ARCHIVES_DIR;
 
     File archiveFile = path.toAbsolutePath().toFile();
     if (extractor.ensureExtractedArchiveDirectory(extractedArchivesPath)) {
-        try {
-          final ViewConfig viewConfig = archiveUtility.getViewConfigFromArchive(archiveFile);
-          String viewName = ViewEntity.getViewName(viewConfig.getName(), viewConfig.getVersion());
-          final String extractedArchiveDirPath = extractedArchivesPath + File.separator + viewName;
-          final File extractedArchiveDirFile = archiveUtility.getFile(extractedArchiveDirPath);
-          final ViewEntity viewDefinition = new ViewEntity(viewConfig, configuration, extractedArchiveDirPath);
-          addDefinition(viewDefinition);
-          readViewArchive(viewDefinition, archiveFile, extractedArchiveDirFile, ambariMetaInfoProvider.get().getServerVersion());
-        } catch (Exception e){
-          LOG.error("Could not process archive at path "+path, e);
-        }
+      try {
+        final ViewConfig viewConfig = archiveUtility.getViewConfigFromArchive(archiveFile);
+        String viewName = ViewEntity.getViewName(viewConfig.getName(), viewConfig.getVersion());
+        final String extractedArchiveDirPath = extractedArchivesPath + File.separator + viewName;
+        final File extractedArchiveDirFile = archiveUtility.getFile(extractedArchiveDirPath);
+        final ViewEntity viewDefinition = new ViewEntity(viewConfig, configuration, extractedArchiveDirPath);
+        addDefinition(viewDefinition);
+        readViewArchive(viewDefinition, archiveFile, extractedArchiveDirFile, ambariMetaInfoProvider.get().getServerVersion());
+      } catch (Exception e) {
+        LOG.error("Could not process archive at path " + path, e);
+      }
     }
 
   }
 
+  private void readNonSystemViewViewArchives() {
+    try {
+
+      File viewDir = configuration.getViewsDir();
+      String extractedArchivesPath = viewDir.getAbsolutePath() +
+          File.separator + EXTRACTED_ARCHIVES_DIR;
+
+      File[] files = viewDir.listFiles();
+
+      if (files != null) {
+        final String serverVersion = ambariMetaInfoProvider.get().getServerVersion();
+
+        final ExecutorService executorService = getExecutorService(configuration);
+
+        for (final File archiveFile : files) {
+          if (!archiveFile.isDirectory()) {
+            try {
+              final ViewConfig viewConfig = archiveUtility.getViewConfigFromArchive(archiveFile);
+
+              String commonName = viewConfig.getName();
+              String version = viewConfig.getVersion();
+              String viewName = ViewEntity.getViewName(commonName, version);
+
+              final String extractedArchiveDirPath = extractedArchivesPath + File.separator + viewName;
+              final File extractedArchiveDirFile = archiveUtility.getFile(extractedArchiveDirPath);
+
+              final ViewEntity viewDefinition = new ViewEntity(viewConfig, configuration, extractedArchiveDirPath);
+
+              boolean systemView = viewDefinition.isSystem();
+              if (!systemView) {
+                addDefinition(viewDefinition);
+                executorService.submit(new Runnable() {
+                  @Override
+                  public void run() {
+                    readViewArchive(viewDefinition, archiveFile, extractedArchiveDirFile, serverVersion);
+                    migrateDataFromPreviousVersion(viewDefinition, serverVersion);
+                  }
+                });
+              }
 
+            } catch (Exception e) {
+              String msg = "Caught exception reading view archive " + archiveFile.getAbsolutePath();
+              LOG.error(msg, e);
+            }
+          }
+        }
+      }
+
+    } catch (Exception e) {
+      LOG.error("Caught exception reading view archives.", e);
+    }
+
+  }
 
 
   // read the view archives.
@@ -1560,7 +1609,7 @@ public class ViewRegistry {
 
       if (extractor.ensureExtractedArchiveDirectory(extractedArchivesPath)) {
 
-        File[] files  = viewDir.listFiles();
+        File[] files = viewDir.listFiles();
 
         if (files != null) {
 
@@ -1575,8 +1624,8 @@ public class ViewRegistry {
                 final ViewConfig viewConfig = archiveUtility.getViewConfigFromArchive(archiveFile);
 
                 String commonName = viewConfig.getName();
-                String version    = viewConfig.getVersion();
-                String viewName   = ViewEntity.getViewName(commonName, version);
+                String version = viewConfig.getVersion();
+                String viewName = ViewEntity.getViewName(commonName, version);
 
                 if (!viewName.matches(viewNameRegExp)) {
                   continue;
@@ -1615,7 +1664,7 @@ public class ViewRegistry {
             }
           }
 
-          for(ViewEntity view : getDefinitions()) {
+          for (ViewEntity view : getDefinitions()) {
             if (view.getStatus() == ViewDefinition.ViewStatus.DEPLOYED) {
              // migrate views that do not need extraction; for ones that do, the call is made in the runnable.
               migrateDataFromPreviousVersion(view, serverVersion);
@@ -1688,7 +1737,7 @@ public class ViewRegistry {
 
         LOG.info("View deployed: " + viewDefinition.getName() + ".");
       }
-    } catch (Exception e) {
+    } catch (Throwable e) {
       String msg = "Caught exception loading view " + viewDefinition.getName();
 
       setViewStatus(viewDefinition, ViewEntity.ViewStatus.ERROR, msg + " : " + e.getMessage());
@@ -1800,7 +1849,7 @@ public class ViewRegistry {
     for (org.apache.ambari.server.state.Cluster cluster : allClusters.values()) {
 
       String clusterName = cluster.getClusterName();
-      Long clusterId= cluster.getClusterId();
+      Long clusterId = cluster.getClusterId();
       StackId stackId = cluster.getCurrentStackVersion();
       Set<String> serviceNames = cluster.getServices().keySet();
 
@@ -1816,7 +1865,7 @@ public class ViewRegistry {
           }
         } catch (Exception e) {
           LOG.error("Can't auto create instance of view " + viewName + " for cluster " + clusterName +
-            ".  Caught exception :" + e.getMessage(), e);
+              ".  Caught exception :" + e.getMessage(), e);
         }
       }
     }
@@ -1829,7 +1878,7 @@ public class ViewRegistry {
   * assigned to one of the roles in the supplied set of role names.
    *
    * @param viewInstanceEntity a view instance entity
-   * @param roles the set of roles to use to for granting access
+   * @param roles              the set of roles to use for granting access
    */
   @Transactional
   protected void setViewInstanceRoleAccess(ViewInstanceEntity viewInstanceEntity, Collection<String> roles) {
@@ -1869,16 +1918,15 @@ public class ViewRegistry {
    * Check the configured view max and min Ambari versions for the given view entity
    * against the given Ambari server version.
    *
-   * @param view           the view
-   * @param serverVersion  the server version
-   *
+   * @param view          the view
+   * @param serverVersion the server version
    * @return true if the given server version >= min version && <= max version for the given view
    */
   protected boolean checkViewVersions(ViewEntity view, String serverVersion) {
     ViewConfig config = view.getConfiguration();
 
     return checkViewVersion(view, config.getMinAmbariVersion(), serverVersion, "minimum", -1, "less than") &&
-           checkViewVersion(view, config.getMaxAmbariVersion(), serverVersion, "maximum", 1, "greater than");
+        checkViewVersion(view, config.getMaxAmbariVersion(), serverVersion, "maximum", 1, "greater than");
 
   }
 
@@ -1901,7 +1949,7 @@ public class ViewRegistry {
       int index = version.indexOf('*');
 
       int compVal = index == -1 ? VersionUtils.compareVersions(serverVersion, version) :
-                    index > 0 ? VersionUtils.compareVersions(serverVersion, version.substring(0, index), index) : 0;
+          index > 0 ? VersionUtils.compareVersions(serverVersion, version.substring(0, index), index) : 0;
 
       if (compVal == errValue) {
         String msg = "The Ambari server version " + serverVersion + " is " + errMsg + " the configured " + label +
@@ -1935,9 +1983,9 @@ public class ViewRegistry {
       throws Exception {
     Injector injector = Guice.createInjector(viewModule);
 
-    ViewExtractor      extractor      = injector.getInstance(ViewExtractor.class);
+    ViewExtractor extractor = injector.getInstance(ViewExtractor.class);
     ViewArchiveUtility archiveUtility = injector.getInstance(ViewArchiveUtility.class);
-    Configuration      configuration  = injector.getInstance(Configuration.class);
+    Configuration configuration = injector.getInstance(Configuration.class);
 
     File viewDir = configuration.getViewsDir();
 
@@ -1951,11 +1999,11 @@ public class ViewRegistry {
       ViewConfig viewConfig = archiveUtility.getViewConfigFromArchive(archiveFile);
 
       String commonName = viewConfig.getName();
-      String version    = viewConfig.getVersion();
-      String viewName   = ViewEntity.getViewName(commonName, version);
+      String version = viewConfig.getVersion();
+      String viewName = ViewEntity.getViewName(commonName, version);
 
       String extractedArchiveDirPath = extractedArchivesPath + File.separator + viewName;
-      File   extractedArchiveDirFile = archiveUtility.getFile(extractedArchiveDirPath);
+      File extractedArchiveDirFile = archiveUtility.getFile(extractedArchiveDirPath);
 
       if (!extractedArchiveDirFile.exists()) {
         ViewEntity viewDefinition = new ViewEntity(viewConfig, configuration, extractedArchiveDirPath);
@@ -1968,7 +2016,7 @@ public class ViewRegistry {
           } finally {
             if (classLoader != null && classLoader instanceof ViewClassLoader) {
               try {
-                ((ViewClassLoader)classLoader).close();
+                ((ViewClassLoader) classLoader).close();
               } catch (IOException e) {
               }
             }
@@ -2006,8 +2054,7 @@ public class ViewRegistry {
   /**
    * Factory method to create a view URL stream provider.
    *
-   * @param viewContext  the view context
-   *
+   * @param viewContext the view context
    * @return a new view URL stream provider
    */
   protected ViewURLStreamProvider createURLStreamProvider(ViewContext viewContext) {
@@ -2045,26 +2092,26 @@ public class ViewRegistry {
    * @param clusterId
    * @return
    */
-  protected AmbariStreamProvider createRemoteAmbariStreamProvider(Long clusterId){
+  protected AmbariStreamProvider createRemoteAmbariStreamProvider(Long clusterId) {
     RemoteAmbariClusterEntity clusterEntity = remoteAmbariClusterDAO.findById(clusterId);
-    if(clusterEntity != null) {
+    if (clusterEntity != null) {
       return new RemoteAmbariStreamProvider(getBaseurl(clusterEntity.getUrl()),
-        clusterEntity.getUsername(),clusterEntity.getPassword(),
-        configuration.getViewAmbariRequestConnectTimeout(),configuration.getViewAmbariRequestReadTimeout());
+          clusterEntity.getUsername(), clusterEntity.getPassword(),
+          configuration.getViewAmbariRequestConnectTimeout(), configuration.getViewAmbariRequestReadTimeout());
     }
     return null;
   }
 
   /**
-   *  Get baseurl of the cluster
-   *  baseurl wil be http://host:port
+   * Get baseurl of the cluster
+   * baseurl will be http://host:port
    *
    * @param url will be in format like http://host:port/api/v1/clusters/clusterName
    * @return baseurl
    */
-  private String getBaseurl(String url){
+  private String getBaseurl(String url) {
     int index = url.indexOf(API_PREFIX);
-    return url.substring(0,index);
+    return url.substring(0, index);
   }
 
   /**
@@ -2074,7 +2121,7 @@ public class ViewRegistry {
    * Finds latest between unregistered instances and returns it.
    *
    * @param serverVersion server version
-   * @param instance view instance entity
+   * @param instance      view instance entity
    * @return latest unregistered instance of same name of same view.
    */
   private ViewInstanceEntity getLatestUnregisteredInstance(String serverVersion, ViewInstanceEntity instance)

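The readNonSystemViewViewArchives() method above registers each non-system view definition up front and hands the expensive archive extraction off to a thread pool. A minimal, self-contained sketch of that pattern follows; it uses plain java.util.concurrent with hypothetical archive file names rather than the actual ViewRegistry, extractor, and migration calls.

import java.io.File;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class AsyncArchiveExtractionSketch {
  public static void main(String[] args) throws InterruptedException {
    // Hypothetical archive files; in ViewRegistry these come from viewsDir.listFiles().
    File[] archives = { new File("files-1.0.0.jar"), new File("capacity-scheduler-2.0.0.jar") };

    ExecutorService executor = Executors.newFixedThreadPool(2);
    for (final File archive : archives) {
      // Register the definition synchronously (cheap), extract asynchronously (expensive).
      System.out.println("Registered view definition for " + archive.getName());
      executor.submit(new Runnable() {
        @Override
        public void run() {
          // Placeholder for readViewArchive(...) and migrateDataFromPreviousVersion(...).
          System.out.println("Extracting " + archive.getName() + " on " + Thread.currentThread().getName());
        }
      });
    }
    executor.shutdown();
    executor.awaitTermination(1, TimeUnit.MINUTES);
  }
}
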
http://git-wip-us.apache.org/repos/asf/ambari/blob/fbff7f7e/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java
index 279340e..c9a3693 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java
@@ -390,7 +390,7 @@ public class ClusterDeployWithStartOnlyTest {
     expectLastCall().once();
 
     expect(clusterController.ensureResourceProvider(anyObject(Resource.Type.class))).andReturn(resourceProvider);
-    expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture);
+    expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture).times(2);
 
     persistedTopologyRequest = new PersistedTopologyRequest(1, request);
     expect(persistedState.getAllRequests()).andReturn(Collections.<ClusterTopology,

http://git-wip-us.apache.org/repos/asf/ambari/blob/fbff7f7e/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java
index 7ae4485..f72fce6 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java
@@ -366,7 +366,7 @@ public class ClusterInstallWithoutStartOnComponentLevelTest {
     expectLastCall().once();
 
     expect(clusterController.ensureResourceProvider(anyObject(Resource.Type.class))).andReturn(resourceProvider);
-    expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture);
+    expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture).times(2);
 
     persistedTopologyRequest = new PersistedTopologyRequest(1, request);
     expect(persistedState.getAllRequests()).andReturn(Collections.<ClusterTopology,

http://git-wip-us.apache.org/repos/asf/ambari/blob/fbff7f7e/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
index 4a71699..42fff74 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
@@ -360,7 +360,7 @@ public class ClusterInstallWithoutStartTest {
     expectLastCall().once();
 
     expect(clusterController.ensureResourceProvider(anyObject(Resource.Type.class))).andReturn(resourceProvider);
-    expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture);
+    expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture).times(2);
 
     persistedTopologyRequest = new PersistedTopologyRequest(1, request);
     expect(persistedState.getAllRequests()).andReturn(Collections.<ClusterTopology,

http://git-wip-us.apache.org/repos/asf/ambari/blob/fbff7f7e/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java b/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
index 6b560ee..c364e53 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
@@ -345,6 +345,7 @@ public class ViewRegistryTest {
     expect(configuration.getViewExtractionThreadPoolCoreSize()).andReturn(2).anyTimes();
     expect(configuration.getViewExtractionThreadPoolMaxSize()).andReturn(3).anyTimes();
     expect(configuration.getViewExtractionThreadPoolTimeout()).andReturn(10000L).anyTimes();
+    expect(configuration.extractViewsAfterClusterConfig()).andReturn(Boolean.FALSE).anyTimes();
 
     expect(viewDir.listFiles()).andReturn(new File[]{viewArchive});
 
@@ -574,6 +575,7 @@ public class ViewRegistryTest {
     expect(configuration.getViewExtractionThreadPoolCoreSize()).andReturn(2).anyTimes();
     expect(configuration.getViewExtractionThreadPoolMaxSize()).andReturn(3).anyTimes();
     expect(configuration.getViewExtractionThreadPoolTimeout()).andReturn(10000L).anyTimes();
+    expect(configuration.extractViewsAfterClusterConfig()).andReturn(Boolean.FALSE).anyTimes();
 
     expect(viewDir.listFiles()).andReturn(new File[]{viewArchive}).anyTimes();
 


[36/50] ambari git commit: AMBARI-19000. Ambari-server fails to restart with --debug if it is already running (aonishuk)

Posted by sw...@apache.org.
AMBARI-19000. Ambari-server fails to restart with --debug if it is already running (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/964c56e7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/964c56e7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/964c56e7

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 964c56e7a80e621103fe5ed318b3050d2c6fe07c
Parents: 1bd562a
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Wed Nov 30 17:07:26 2016 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Wed Nov 30 17:07:26 2016 +0200

----------------------------------------------------------------------
 ambari-server/src/main/python/ambari-server.py          | 12 +++++++++++-
 .../src/main/python/ambari_server/serverUtils.py        | 11 +++++++++++
 2 files changed, 22 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/964c56e7/ambari-server/src/main/python/ambari-server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari-server.py b/ambari-server/src/main/python/ambari-server.py
index ee6259b..ab26c0d 100755
--- a/ambari-server/src/main/python/ambari-server.py
+++ b/ambari-server/src/main/python/ambari-server.py
@@ -35,7 +35,7 @@ from ambari_commons.os_utils import remove_file
 from ambari_server.BackupRestore import main as BackupRestore_main
 from ambari_server.dbConfiguration import DATABASE_NAMES, LINUX_DBMS_KEYS_LIST
 from ambari_server.serverConfiguration import configDefaults, get_ambari_properties, PID_NAME
-from ambari_server.serverUtils import is_server_runing, refresh_stack_hash
+from ambari_server.serverUtils import is_server_runing, refresh_stack_hash, wait_for_server_to_stop
 from ambari_server.serverSetup import reset, setup, setup_jce_policy
 from ambari_server.serverUpgrade import upgrade, upgrade_stack, set_current
 from ambari_server.setupHttps import setup_https, setup_truststore
@@ -63,6 +63,8 @@ logger = logging.getLogger()
 
 formatstr = "%(levelname)s %(asctime)s %(filename)s:%(lineno)d - %(message)s"
 
+SERVER_STOP_TIMEOUT = 30
+
 class UserActionPossibleArgs(object):
   def __init__(self, i_fn, i_possible_args_numbers, *args, **kwargs):
     self.fn = i_fn
@@ -166,6 +168,14 @@ def stop(args):
     except OSError, e:
       print_info_msg("Unable to stop Ambari Server - " + str(e))
       return
+
+    print "Waiting for server stop..."
+    logger.info("Waiting for server stop...")
+
+    if not wait_for_server_to_stop(SERVER_STOP_TIMEOUT):
+      print "Ambari-server failed to stop"
+      logger.info("Ambari-server failed to stop")
+
     pid_file_path = os.path.join(configDefaults.PID_DIR, PID_NAME)
     os.remove(pid_file_path)
     print "Ambari Server stopped"

http://git-wip-us.apache.org/repos/asf/ambari/blob/964c56e7/ambari-server/src/main/python/ambari_server/serverUtils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/serverUtils.py b/ambari-server/src/main/python/ambari_server/serverUtils.py
index 3af233c..4621646 100644
--- a/ambari-server/src/main/python/ambari_server/serverUtils.py
+++ b/ambari-server/src/main/python/ambari_server/serverUtils.py
@@ -19,6 +19,7 @@ limitations under the License.
 '''
 
 import os
+import time
 from ambari_commons.exceptions import FatalException, NonFatalException
 from ambari_commons.logging_utils import get_verbose
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
@@ -62,6 +63,16 @@ def is_server_runing():
     return False, None
 
 
+def wait_for_server_to_stop(wait_timeout):
+  start_time = time.time()
+  is_timeout = lambda: time.time() - start_time > wait_timeout
+
+  while is_server_runing()[0] and not is_timeout():
+    time.sleep(0.1)
+
+  return not is_timeout()
+
+
 @OsFamilyFuncImpl(OSConst.WINSRV_FAMILY)
 def is_server_runing():
   from ambari_commons.os_windows import SERVICE_STATUS_STARTING, SERVICE_STATUS_RUNNING, SERVICE_STATUS_STOPPING, \

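The new wait_for_server_to_stop() helper is a bounded polling loop: keep checking the running state, sleep briefly between checks, and report whether the server stopped before the timeout elapsed. The same idea expressed as a stand-alone Java sketch; the isRunning supplier below is a stand-in for is_server_runing() and the whole class is illustrative, not Ambari code.

import java.util.function.BooleanSupplier;

public class WaitForStopSketch {

  // Returns true if the condition cleared before the timeout, false on timeout.
  static boolean waitForStop(BooleanSupplier isRunning, long timeoutMillis) throws InterruptedException {
    long start = System.currentTimeMillis();
    while (isRunning.getAsBoolean()) {
      if (System.currentTimeMillis() - start > timeoutMillis) {
        return false; // timed out while the server was still running
      }
      Thread.sleep(100); // matches the 0.1 second poll interval used in serverUtils.py
    }
    return true;
  }

  public static void main(String[] args) throws InterruptedException {
    final long stopAt = System.currentTimeMillis() + 500; // pretend the server stops after ~0.5s
    boolean stopped = waitForStop(() -> System.currentTimeMillis() < stopAt, 30_000);
    System.out.println(stopped ? "Ambari Server stopped" : "Ambari-server failed to stop");
  }
}
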

[18/50] ambari git commit: AMBARI-19002 Ambari widgets and service checks to honor LLAP clusters with 100% allocation to LLAP daemons. (atkach)

Posted by sw...@apache.org.
AMBARI-19002 Ambari widgets and service checks to honor LLAP clusters with 100% allocation to LLAP daemons. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/616e97ea
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/616e97ea
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/616e97ea

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 616e97ea3655f54db3d1a19c2af43672ea971de2
Parents: 182479d
Author: Andrii Tkach <at...@apache.org>
Authored: Tue Nov 29 13:40:37 2016 +0200
Committer: Andrii Tkach <at...@apache.org>
Committed: Tue Nov 29 13:40:37 2016 +0200

----------------------------------------------------------------------
 .../stacks/HDP/2.0.6/configuration/cluster-env.xml |  6 ++++++
 ambari-web/app/views/main/dashboard/widgets.js     | 17 +++++++++++++++--
 .../test/views/main/dashboard/widgets_test.js      | 11 ++++++++++-
 3 files changed, 31 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/616e97ea/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
index 0d313cc..cc6c8a3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
@@ -274,4 +274,10 @@ gpgcheck=0</value>
     <description>For properties handled by handle_mounted_dirs this will make Ambari </description>
     <on-ambari-upgrade add="true"/>
   </property>
+  <property>
+    <name>hide_yarn_memory_widget</name>
+    <value>false</value>
+    <description>When set to true, the YARN Memory widget is hidden on the dashboard.</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/616e97ea/ambari-web/app/views/main/dashboard/widgets.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/widgets.js b/ambari-web/app/views/main/dashboard/widgets.js
index 48293d3..c5eaafc 100644
--- a/ambari-web/app/views/main/dashboard/widgets.js
+++ b/ambari-web/app/views/main/dashboard/widgets.js
@@ -116,6 +116,16 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
     }, this);
   },
 
+  resolveConfigDependencies: function(visibleFull, hiddenFull) {
+    var clusterEnv = App.router.get('clusterController.clusterEnv').properties;
+
+    if (clusterEnv['hide_yarn_memory_widget'] === 'true') {
+      hiddenFull.push(['20', 'YARN Memory']);
+    } else {
+      visibleFull.splice(visibleFull.indexOf('19'), 0, '20');
+    }
+  },
+
   /**
    * Load widget statuses to <code>initPrefObject</code>
    */
@@ -126,7 +136,7 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
       '6', '7', '8', '9', //host metrics
       '1', '5', '3', '10', //hdfs
       '13', '12', '14', '16', //hbase
-      '17', '18', '19', '20', '23', // all yarn
+      '17', '18', '19', '23', // all yarn
       '21', // storm
       '22', // flume
       '24', // hawq
@@ -135,6 +145,7 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
     var hiddenFull = [
       ['15', 'Region In Transition']
     ];
+    this.resolveConfigDependencies(visibleFull, hiddenFull);
 
     // Display widgets for host metrics if the stack definition has a host metrics service to display it.
     if (this.get('host_metrics_model') == null) {
@@ -155,7 +166,9 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
       hbase.forEach(function (item) {
         visibleFull = visibleFull.without(item);
       }, this);
-      hiddenFull = [];
+      hiddenFull = hiddenFull.filter(function(item) {
+        return item[0] !== '15';
+      });
     }
     if (this.get('yarn_model') == null) {
       var yarn = ['17', '18', '19', '20', '23'];

http://git-wip-us.apache.org/repos/asf/ambari/blob/616e97ea/ambari-web/test/views/main/dashboard/widgets_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/dashboard/widgets_test.js b/ambari-web/test/views/main/dashboard/widgets_test.js
index 49a06a4..0781c79 100644
--- a/ambari-web/test/views/main/dashboard/widgets_test.js
+++ b/ambari-web/test/views/main/dashboard/widgets_test.js
@@ -107,12 +107,21 @@ describe('App.MainDashboardWidgetsView', function () {
           yarn_model: {}
         },
         e: {
-          visibleL: totalWidgetsCount,
+          visibleL: totalWidgetsCount - 1,
           hiddenL: 1
         },
         m: 'All models are not null'
       }
     ]);
+
+    beforeEach(function() {
+      sinon.stub(view, 'resolveConfigDependencies');
+    });
+
+    afterEach(function() {
+      view.resolveConfigDependencies.restore();
+    });
+
     tests.forEach(function (test) {
       describe(test.m, function () {
 


[34/50] ambari git commit: AMBARI-19027. Manage JournalNode tweaks (onechiporenko)

Posted by sw...@apache.org.
AMBARI-19027. Manage JournalNode tweaks (onechiporenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/adde3db0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/adde3db0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/adde3db0

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: adde3db032bf538ba4c872c931c46e28feef3fc1
Parents: 6729297
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Wed Nov 30 13:19:43 2016 +0200
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Wed Nov 30 14:28:12 2016 +0200

----------------------------------------------------------------------
 ambari-web/app/messages.js                            | 12 +++++-------
 .../main/admin/highAvailability/journalNode/step2.hbs | 14 ++++++++------
 2 files changed, 13 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/adde3db0/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index e8517b2..7c3ccd3 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -1255,7 +1255,7 @@ Em.I18n.translations = {
   'admin.kerberos.disable.notice.completed': 'Services have been successfully tested without kerberos environment.',
   'admin.kerberos.wizard.step1.notice.inProgress': 'Please wait while cluster is being unkerberized',
 
-  'admin.manageJournalNode.label': 'Manage JournalNode',
+  'admin.manageJournalNode.label': 'Manage JournalNodes',
   'admin.manageJournalNode.wizard.header': 'Manage JournalNodes Wizard',
   'admin.manageJournalNode.wizard.step1.header': 'Assign JournalNodes',
   'admin.manageJournalNode.wizard.step2.header': 'Review',
@@ -1263,14 +1263,12 @@ Em.I18n.translations = {
   'admin.manageJournalNode.wizard.step4.header': 'Add/Remove JournalNodes',
   'admin.manageJournalNode.wizard.step5.header': 'Format JournalNodes',
   'admin.manageJournalNode.wizard.step6.header': 'Start Active NameNode',
-  'admin.manageJournalNode.wizard.step7.header': 'BootStrap StandBy NameNode',
+  'admin.manageJournalNode.wizard.step7.header': 'BootStrap Standby NameNode',
   'admin.manageJournalNode.wizard.step8.header': 'Start All Services',
 
   'admin.manageJournalNode.wizard.step1.body': 'Add, or Remove JournalNodes',
-  'admin.manageJournalNode.wizard.step3.confirm.config.body':'<div class="alert alert-info">' +
-  '<b>Review Configuration Changes.</b></br>' +
-  'The following lists the configuration changes that will be made by the Wizard to manage JournalNode. This information is for <b> review only </b>.' +
-  '</div>',
+  'admin.manageJournalNode.wizard.step3.confirm.config.body': '<b>Configuration Change Review.</b></br>' +
+  'As part of this process, configuration changes are required. Please review the changes below, and note that they are for <b>review only</b>.  Future steps in this wizard will update this configuration, and restart <b>all</b> services automatically.',
 
   'admin.manageJournalNode.wizard.step4.task0.title' : 'Stop Standby NameNode',
   'admin.manageJournalNode.wizard.step4.task1.title': 'Stop Services',
@@ -1294,7 +1292,7 @@ Em.I18n.translations = {
   'admin.manageJournalNode.wizard.progressPage.notice.inProgress': 'Please wait while JournalNodes are being deployed',
   'admin.manageJournalNode.wizard.step6.notice.inProgress': 'Please wait for related services to be started',
   'admin.manageJournalNode.wizard.step4.notice.inProgress': 'Please wait while JournalNodes are being deployed',
-  'admin.manageJournalNode.wizard.step8.notice.completed':'JournalNodes has been processed successfully.',
+  'admin.manageJournalNode.wizard.step8.notice.completed':'Completed update to JournalNodes.',
 
   'admin.manageJournalNode.wizard.step3.body':
   '<ol>' +

http://git-wip-us.apache.org/repos/asf/ambari/blob/adde3db0/ambari-web/app/templates/main/admin/highAvailability/journalNode/step2.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/highAvailability/journalNode/step2.hbs b/ambari-web/app/templates/main/admin/highAvailability/journalNode/step2.hbs
index de4e248..456e9ed 100644
--- a/ambari-web/app/templates/main/admin/highAvailability/journalNode/step2.hbs
+++ b/ambari-web/app/templates/main/admin/highAvailability/journalNode/step2.hbs
@@ -69,12 +69,14 @@
 
 
 <div id="serviceConfig">
-    {{#if controller.isLoaded}}
-        {{{t admin.manageJournalNode.wizard.step3.confirm.config.body}}}
-        {{view App.ServiceConfigView isNotEditableBinding="controller.isNotEditable"}}
-    {{else}}
-        {{view App.SpinnerView}}
-    {{/if}}
+  {{#if controller.isLoaded}}
+    <div class="alert alert-info">
+      {{{t admin.manageJournalNode.wizard.step3.confirm.config.body}}}
+    </div>
+    {{view App.ServiceConfigView isNotEditableBinding="controller.isNotEditable"}}
+  {{else}}
+    {{view App.SpinnerView}}
+  {{/if}}
 </div>
 
 <div class="btn-area">


[37/50] ambari git commit: AMBARI-18949 Extend current quick link JSON with properties

Posted by sw...@apache.org.
AMBARI-18949 Extend current quick link JSON with properties

Change-Id: Ie994b82cea7abddcb6d4c32afc6b8597b608f025


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/eec985b1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/eec985b1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/eec985b1

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: eec985b1675d35f407913c66b405bfee95527931
Parents: 964c56e
Author: Balazs Bence Sari <bs...@hortonworks.com>
Authored: Wed Nov 30 16:23:40 2016 +0100
Committer: Miklos Gergely <mg...@hortonworks.com>
Committed: Wed Nov 30 16:23:40 2016 +0100

----------------------------------------------------------------------
 .../ambari/server/state/quicklinks/Link.java    | 21 +++++++
 .../QuickLinksConfigurationModuleTest.java      | 36 +++++++++++
 .../child_quicklinks_with_properties.json       | 64 +++++++++++++++++++
 .../parent_quicklinks_with_properties.json      | 65 ++++++++++++++++++++
 4 files changed, 186 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/eec985b1/ambari-server/src/main/java/org/apache/ambari/server/state/quicklinks/Link.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/quicklinks/Link.java b/ambari-server/src/main/java/org/apache/ambari/server/state/quicklinks/Link.java
index 091926f..f7c14f3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/quicklinks/Link.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/quicklinks/Link.java
@@ -18,6 +18,11 @@
 
 package org.apache.ambari.server.state.quicklinks;
 
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.annotation.Nullable;
+
 import org.codehaus.jackson.annotate.JsonIgnoreProperties;
 import org.codehaus.jackson.annotate.JsonProperty;
 import org.codehaus.jackson.map.annotate.JsonSerialize;
@@ -46,6 +51,9 @@ public class Link{
   @JsonProperty("protocol")
   private Protocol protocol;
 
+  @JsonProperty("properties")
+  private List<String> properties;
+
   public String getName() {
     return name;
   }
@@ -102,6 +110,14 @@ public class Link{
     this.protocol = protocol;
   }
 
+  @Nullable
+  public List<String> getProperties() {
+    return properties;
+  }
+
+  public void setProperties(List<String> properties) {
+    this.properties = properties;
+  }
 
   public boolean isRemoved(){
     //treat a link as removed if the section only contains a name
@@ -132,5 +148,10 @@ public class Link{
     } else {
       port.mergetWithParent(parentLink.getPort());
     }
+
+    if (null == properties && null != parentLink.properties) {
+      properties = parentLink.properties;
+    }
   }
+
 }
\ No newline at end of file

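The properties handling added to Link above gives child quick link definitions a simple override rule: a link that omits "properties" inherits the parent's list, while any explicit list on the child, including an empty one, replaces it. A rough stand-alone sketch of that rule, using plain Java lists instead of the actual Link class; the behaviour matches the assertions in QuickLinksConfigurationModuleTest below.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class QuickLinkPropertiesMergeSketch {

  // Mirrors the null check added to Link: only inherit when the child has no list at all.
  static List<String> merge(List<String> child, List<String> parent) {
    return (child == null && parent != null) ? parent : child;
  }

  public static void main(String[] args) {
    List<String> parent = Arrays.asList("authenticated");

    // Child declares its own list -> the child's list wins.
    System.out.println(merge(Arrays.asList("authenticated", "sso"), parent)); // [authenticated, sso]
    // Child omits the element entirely -> inherits the parent's list.
    System.out.println(merge(null, parent));                                  // [authenticated]
    // Child declares an empty list -> the empty list wins, nothing is inherited.
    System.out.println(merge(Collections.<String>emptyList(), parent));       // []
  }
}
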
http://git-wip-us.apache.org/repos/asf/ambari/blob/eec985b1/ambari-server/src/test/java/org/apache/ambari/server/stack/QuickLinksConfigurationModuleTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stack/QuickLinksConfigurationModuleTest.java b/ambari-server/src/test/java/org/apache/ambari/server/stack/QuickLinksConfigurationModuleTest.java
index 38176aa..f44f741 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/stack/QuickLinksConfigurationModuleTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stack/QuickLinksConfigurationModuleTest.java
@@ -18,6 +18,7 @@
 
 package org.apache.ambari.server.stack;
 
+import com.google.common.collect.Lists;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.state.quicklinks.Check;
 import org.apache.ambari.server.state.quicklinks.Link;
@@ -28,7 +29,10 @@ import org.apache.ambari.server.state.quicklinks.QuickLinksConfiguration;
 import org.junit.Test;
 
 import java.io.File;
+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 import static org.junit.Assert.*;
 
@@ -111,6 +115,38 @@ public class QuickLinksConfigurationModuleTest {
     }
   }
 
+  @Test
+  public void testResolveOverrideProperties() throws Exception{
+    QuickLinks[] results = resolveQuickLinks("parent_quicklinks_with_properties.json",
+        "child_quicklinks_with_properties.json");
+    QuickLinks parentQuickLinks = results[0];
+    QuickLinks childQuickLinks = results[1];
+
+    //resolved quicklinks configuration
+    QuickLinksConfiguration childQuickLinksConfig = childQuickLinks.getQuickLinksConfiguration();
+    assertNotNull(childQuickLinksConfig);
+
+    //links
+    List<Link> links = childQuickLinksConfig.getLinks();
+    assertNotNull(links);
+    assertEquals(3, links.size());
+    Map<String, Link> linksByName = new HashMap<>();
+    for (Link link: links) {
+      linksByName.put(link.getName(), link);
+    }
+    assertEquals("Links are not properly overridden for foo_ui",
+        Lists.newArrayList("authenticated", "sso"),
+        linksByName.get("foo_ui").getProperties());
+    assertEquals("Parent links for foo_jmx are not inherited.",
+        Lists.newArrayList("authenticated"),
+        linksByName.get("foo_jmx").getProperties());
+    assertEquals("Links are not properly overridden for foo_logs",
+        new ArrayList<>(),
+        linksByName.get("foo_logs").getProperties());
+
+  }
+
+
   private QuickLinks[] resolveQuickLinks(String parentJson, String childJson) throws AmbariException{
     File parentQuiclinksFile = new File(this.getClass().getClassLoader().getResource(parentJson).getFile());
     File childQuickLinksFile = new File(this.getClass().getClassLoader().getResource(childJson).getFile());

http://git-wip-us.apache.org/repos/asf/ambari/blob/eec985b1/ambari-server/src/test/resources/child_quicklinks_with_properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/child_quicklinks_with_properties.json b/ambari-server/src/test/resources/child_quicklinks_with_properties.json
new file mode 100644
index 0000000..36cd4f3
--- /dev/null
+++ b/ambari-server/src/test/resources/child_quicklinks_with_properties.json
@@ -0,0 +1,64 @@
+{
+  "name": "default",
+  "description": "default quick links configuration",
+  "configuration": {
+    "protocol":
+    {
+      "type":"http",
+      "checks":[
+        {
+          "property":"foo.http.policy",
+          "desired":"HTTP_ONLY",
+          "site":"foo-site"
+        }
+      ]
+    },
+
+    "links": [
+      {
+        "name": "foo_ui",
+        "label": "Foo UI",
+        "requires_user_name": "false",
+        "url": "%@://%@:%@",
+        "properties": ["authenticated", "sso"],
+        "port":{
+          "http_property": "foo.ui.webapp.address",
+          "http_default_port": "19888",
+          "https_property": "foo.ui.webapp.https.address",
+          "https_default_port": "8090",
+          "regex": "\\w*:(\\d+)",
+          "site": "foo-site"
+        }
+      },
+      {
+        "name":"foo_jmx",
+        "label":"Foo JMX",
+        "requires_user_name":"false",
+        "url":"%@://%@:%@/jmx",
+        "port":{
+          "http_property": "foo.jmx.webapp.address",
+          "http_default_port": "19888",
+          "https_property": "foo.jmx.webapp.https.address",
+          "https_default_port": "8090",
+          "regex": "\\w*:(\\d+)",
+          "site": "foo-site"
+        }
+      },
+      {
+        "name": "foo_logs",
+        "label": "Foo logs",
+        "requires_user_name": "false",
+        "url": "%@://%@:%@/logs",
+        "properties": [],
+        "port":{
+          "http_property": "foo.logs.webapp.address",
+          "http_default_port": "19888",
+          "https_property": "foo.webapp.https.address",
+          "https_default_port": "8090",
+          "regex": "\\w*:(\\d+)",
+          "site": "foo-site"
+        }
+      }
+    ]
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/eec985b1/ambari-server/src/test/resources/parent_quicklinks_with_properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/parent_quicklinks_with_properties.json b/ambari-server/src/test/resources/parent_quicklinks_with_properties.json
new file mode 100644
index 0000000..a315f3f
--- /dev/null
+++ b/ambari-server/src/test/resources/parent_quicklinks_with_properties.json
@@ -0,0 +1,65 @@
+{
+  "name": "default",
+  "description": "default quick links configuration",
+  "configuration": {
+    "protocol":
+    {
+      "type":"http",
+      "checks":[
+        {
+          "property":"foo.http.policy",
+          "desired":"HTTP_ONLY",
+          "site":"foo-site"
+        }
+      ]
+    },
+
+    "links": [
+      {
+        "name": "foo_ui",
+        "label": "Foo UI",
+        "requires_user_name": "false",
+        "url": "%@://%@:%@",
+        "properties": ["authenticated"],
+        "port":{
+          "http_property": "foo.ui.webapp.address",
+          "http_default_port": "19888",
+          "https_property": "foo.ui.webapp.https.address",
+          "https_default_port": "8090",
+          "regex": "\\w*:(\\d+)",
+          "site": "foo-site"
+        }
+      },
+      {
+        "name":"foo_jmx",
+        "label":"Foo JMX",
+        "requires_user_name":"false",
+        "url":"%@://%@:%@/jmx",
+        "properties": ["authenticated"],
+        "port":{
+          "http_property": "foo.jmx.webapp.address",
+          "http_default_port": "19888",
+          "https_property": "foo.jmx.webapp.https.address",
+          "https_default_port": "8090",
+          "regex": "\\w*:(\\d+)",
+          "site": "foo-site"
+        }
+      },
+      {
+        "name": "foo_logs",
+        "label": "Foo logs",
+        "requires_user_name": "false",
+        "url": "%@://%@:%@/logs",
+        "properties": ["authenticated"],
+        "port":{
+          "http_property": "foo.logs.webapp.address",
+          "http_default_port": "19888",
+          "https_property": "foo.webapp.https.address",
+          "https_default_port": "8090",
+          "regex": "\\w*:(\\d+)",
+          "site": "foo-site"
+        }
+      }
+    ]
+  }
+}
\ No newline at end of file


[20/50] ambari git commit: AMBARI-18996. Ability to enable SPNEGO on Log Search Portal UI from Ambari (oleewere)

Posted by sw...@apache.org.
AMBARI-18996. Ability to enable SPNEGO on Log Search Portal UI from Ambari (oleewere)

Change-Id: Ieb7f4327a42de3d506ffc85fd35177ca50fe084f


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1fca37d9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1fca37d9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1fca37d9

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 1fca37d98d4f025967ca94ce2fc92f5ba26bdc46
Parents: 2591ba1
Author: oleewere <ol...@gmail.com>
Authored: Tue Nov 29 13:50:19 2016 +0100
Committer: oleewere <ol...@gmail.com>
Committed: Tue Nov 29 15:48:42 2016 +0100

----------------------------------------------------------------------
 .../LogsearchKRBAuthenticationFilter.java       |  4 +-
 .../src/main/resources/logsearch.properties     |  2 +-
 .../configuration/logsearch-properties.xml      | 40 ++++++++++++++++++++
 ambari-web/app/data/HDP2/site_properties.js     | 28 ++++++++++++++
 4 files changed, 71 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1fca37d9/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java
index 808320d..fdda542 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java
@@ -72,7 +72,7 @@ public class LogsearchKRBAuthenticationFilter extends LogsearchKrbFilter {
   private static final String PRINCIPAL = "logsearch.spnego.kerberos.principal";
   private static final String KEYTAB = "logsearch.spnego.kerberos.keytab";
   private static final String HOST_NAME = "logsearch.spnego.kerberos.host";
-  private static final String KERBEROS_ENABLE="logsearch.spnego.kerberos.enable";
+  private static final String KERBEROS_ENABLED = "logsearch.spnego.kerberos.enabled";
 
   private static final String NAME_RULES_PARAM = "kerberos.name.rules";
   private static final String TOKEN_VALID_PARAM = "token.validity";
@@ -230,7 +230,7 @@ public class LogsearchKRBAuthenticationFilter extends LogsearchKrbFilter {
   }
 
   private void isSpnegoEnable() {
-    spnegoEnable = PropertiesHelper.getBooleanProperty(KERBEROS_ENABLE, false);
+    spnegoEnable = PropertiesHelper.getBooleanProperty(KERBEROS_ENABLED, false);
     if (spnegoEnable) {
       spnegoEnable = false;
       String keytab = PropertiesHelper.getProperty(KEYTAB);

http://git-wip-us.apache.org/repos/asf/ambari/blob/1fca37d9/ambari-logsearch/ambari-logsearch-portal/src/main/resources/logsearch.properties
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/resources/logsearch.properties b/ambari-logsearch/ambari-logsearch-portal/src/main/resources/logsearch.properties
index 7641b0c..b6cc4a5 100755
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/resources/logsearch.properties
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/resources/logsearch.properties
@@ -57,7 +57,7 @@ logsearch.solr.jaas.file=/usr/lib/ambari-logsearch-portal/logsearch_solr_jaas.co
 
 
 #portal Kerberos 
-logsearch.spnego.kerberos.enable=false
+logsearch.spnego.kerberos.enabled=false
 logsearch.spnego.kerberos.keytab=
 logsearch.spnego.kerberos.principal=
 logsearch.spnego.kerberos.host=
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/1fca37d9/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-properties.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-properties.xml
index 9dcd5cf..10ca9ae 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-properties.xml
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-properties.xml
@@ -236,4 +236,44 @@
     </value-attributes>
     <on-ambari-upgrade add="true"/>
   </property>
+  <property>
+    <name>logsearch.spnego.kerberos.enabled</name>
+    <value/>
+    <display-name>Http Spnego enabled</display-name>
+    <description>Enable SPNEGO based authentication on the Log Search UI</description>
+    <value-attributes>
+      <type>boolean</type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>logsearch.spnego.kerberos.principal</name>
+    <value/>
+    <display-name>Http Spnego principal</display-name>
+    <description>Principal for SPNEGO authentication for Http requests</description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>logsearch.spnego.kerberos.keytab</name>
+    <value/>
+    <display-name>Http Spnego keytab</display-name>
+    <description>Keytab for SPNEGO authentication for Http requests</description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>logsearch.spnego.kerberos.host</name>
+    <value>localhost</value>
+    <display-name>Http Spnego hostname</display-name>
+    <description>Fully-qualified host name in the Kerberos service principal name (SPN) that is used by SPNEGO</description>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1fca37d9/ambari-web/app/data/HDP2/site_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2/site_properties.js b/ambari-web/app/data/HDP2/site_properties.js
index d61d4aa..5b66506 100644
--- a/ambari-web/app/data/HDP2/site_properties.js
+++ b/ambari-web/app/data/HDP2/site_properties.js
@@ -2285,6 +2285,34 @@ var hdp2properties = [
     "category": "Advanced logsearch-properties",
     "index": 14
   },
+  {
+    "name": "logsearch.spnego.kerberos.enabled",
+    "serviceName": "LOGSEARCH",
+    "filename": "logsearch-properties.xml",
+    "category": "Advanced logsearch-properties",
+    "index": 15
+  },
+  {
+    "name": "logsearch.spnego.kerberos.keytab",
+    "serviceName": "LOGSEARCH",
+    "filename": "logsearch-properties.xml",
+    "category": "Advanced logsearch-properties",
+    "index": 16
+  },
+  {
+    "name": "logsearch.spnego.kerberos.principal",
+    "serviceName": "LOGSEARCH",
+    "filename": "logsearch-properties.xml",
+    "category": "Advanced logsearch-properties",
+    "index": 17
+  },
+  {
+    "name": "logsearch.spnego.kerberos.host",
+    "serviceName": "LOGSEARCH",
+    "filename": "logsearch-properties.xml",
+    "category": "Advanced logsearch-properties",
+    "index": 18
+  },
   /*infra-solr-client-log4j*/
   {
     "name": "infra_solr_client_log_dir",


[19/50] ambari git commit: AMBARI-18989. Log level filter labels not getting updated with host name filter (oleewere)

Posted by sw...@apache.org.
AMBARI-18989. Log level filter labels not getting updated with host name filter (oleewere)

Change-Id: I8f86056048f85f6a5de5763c59d68a9b023e5abb


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2591ba11
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2591ba11
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2591ba11

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 2591ba11300a25264b5796868c3de2d448bfb380
Parents: 616e97e
Author: oleewere <ol...@gmail.com>
Authored: Fri Nov 25 16:30:37 2016 +0100
Committer: oleewere <ol...@gmail.com>
Committed: Tue Nov 29 15:15:41 2016 +0100

----------------------------------------------------------------------
 .../org/apache/ambari/logsearch/LogSearch.java  |  15 +--
 .../logsearch/common/LogSearchConstants.java    |   1 +
 .../AbstractOperationHolderConverter.java       |   9 ++
 ...actServiceLogRequestFacetQueryConverter.java |  44 +++++++
 .../BaseServiceLogRequestQueryConverter.java    |   1 +
 ...ServiceLogAnyGraphRequestQueryConverter.java |  13 +++
 ...eLogComponentLevelRequestQueryConverter.java |  15 +--
 ...eLogComponentRequestFacetQueryConverter.java |  15 +--
 ...rviceLogLevelCountRequestQueryConverter.java |  16 +++
 ...eLogLevelDateRangeRequestQueryConverter.java |  16 ++-
 ...erviceLogTreeRequestFacetQueryConverter.java |  17 +--
 .../ambari/logsearch/doc/DocConstants.java      |   1 +
 .../logsearch/graph/GraphDataGenerator.java     |  26 +++--
 .../logsearch/manager/ServiceLogsManager.java   |   2 +-
 .../request/ServiceLogParamDefinition.java      |   6 +
 .../request/impl/BaseServiceLogRequest.java     |  13 +++
 .../dashboard/BubbleGraphTableLayoutView.js     |   4 +-
 .../views/dashboard/ComponentListView.js        |   2 +-
 .../scripts/views/dashboard/HostListView.js     |  14 +--
 .../main/webapp/scripts/views/tabs/TreeView.js  |  10 +-
 .../webapp/templates/tabs/TreeView_tmpl.html    | 115 ++++++-------------
 ...ComponentLevelRequestQueryConverterTest.java |   7 +-
 ...ComponentRequestFacetQueryConverterTest.java |   4 +-
 23 files changed, 202 insertions(+), 164 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
index d4be790..2c3f4f5 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
@@ -63,12 +63,12 @@ public class LogSearch {
   private static final String HTTP_PROTOCOL = "http";
   private static final String HTTPS_PORT = "61889";
   private static final String HTTP_PORT = "61888";
-  
+
   private static final String WEB_RESOURCE_FOLDER = "webapps/app";
   private static final String ROOT_CONTEXT = "/";
   private static final Integer SESSION_TIMEOUT = 30;
 
- 
+
   public static void main(String[] argv) {
     LogSearch logSearch = new LogSearch();
     ManageStartEndTime.manage();
@@ -78,7 +78,7 @@ public class LogSearch {
       logger.error("Error running logsearch server", e);
     }
   }
-  
+
   public void run(String[] argv) throws Exception {
     Server server = buildSever(argv);
     HandlerList handlers = new HandlerList();
@@ -98,9 +98,10 @@ public class LogSearch {
 
   public Server buildSever(String argv[]) {
     Server server = new Server();
-    ServerConnector connector = new ServerConnector(server);
     boolean portSpecified = argv.length > 0;
     String protcolProperty = PropertiesHelper.getProperty(LOGSEARCH_PROTOCOL_PROP,HTTP_PROTOCOL);
+    HttpConfiguration httpConfiguration = new HttpConfiguration();
+    httpConfiguration.setRequestHeaderSize(65535);
     if (StringUtils.isEmpty(protcolProperty)) {
       protcolProperty = HTTP_PROTOCOL;
     }
@@ -109,18 +110,18 @@ public class LogSearch {
       logger.info("Building https server...........");
       port = portSpecified ? argv[0] : HTTPS_PORT;
       checkPort(Integer.parseInt(port));
-      HttpConfiguration https = new HttpConfiguration();
-      https.addCustomizer(new SecureRequestCustomizer());
+      httpConfiguration.addCustomizer(new SecureRequestCustomizer());
       SslContextFactory sslContextFactory = SSLUtil.getSslContextFactory();
       ServerConnector sslConnector = new ServerConnector(server,
           new SslConnectionFactory(sslContextFactory, "http/1.1"),
-          new HttpConnectionFactory(https));
+          new HttpConnectionFactory(httpConfiguration));
       sslConnector.setPort(Integer.parseInt(port));
       server.setConnectors(new Connector[] { sslConnector });
     } else {
       logger.info("Building http server...........");
       port = portSpecified ? argv[0] : HTTP_PORT;
       checkPort(Integer.parseInt(port));
+      ServerConnector connector = new ServerConnector(server, new HttpConnectionFactory(httpConfiguration));
       connector.setPort(Integer.parseInt(port));
       server.setConnectors(new Connector[] { connector });
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
index fe31e6d..d1fc68d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
@@ -102,6 +102,7 @@ public class LogSearchConstants {
   public static final String REQUEST_PARAM_FILTER_NAME = "filterName";
   public static final String REQUEST_PARAM_ROW_TYPE = "rowType";
   public static final String REQUEST_PARAM_UTC_OFFSET = "utcOffset";
+  public static final String REQUEST_PARAM_HOSTS = "hostList";
 
 
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/AbstractOperationHolderConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/AbstractOperationHolderConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/AbstractOperationHolderConverter.java
index 3c91f51..b4c6943 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/AbstractOperationHolderConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/AbstractOperationHolderConverter.java
@@ -33,6 +33,7 @@ import org.springframework.data.solr.core.query.SimpleFilterQuery;
 import org.springframework.data.solr.core.query.SimpleStringCriteria;
 
 import javax.inject.Inject;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -75,6 +76,14 @@ public abstract class AbstractOperationHolderConverter <REQUEST_TYPE, QUERY_TYPE
     return this.addInFilterQuery(query, field, values, false);
   }
 
+  public Query addInFiltersIfNotNullAndEnabled(Query query, String value, String field, boolean condition) {
+    if (value != null && condition) {
+      List<String> values = value.length() == 0 ? Arrays.asList("-1") : splitValueAsList(value, ",");
+      addInFilterQuery(query, field, values);
+    }
+    return query;
+  }
+
   public Query addInFilterQuery(Query query, String field, List<String> values, boolean negate) {
     if (CollectionUtils.isNotEmpty(values)) {
       addFilterQuery(query, new Criteria(field).is(values), negate);
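
The new addInFiltersIfNotNullAndEnabled helper above only adds a filter when the raw request value is present and the extra condition holds; an empty (but present) value becomes a single "-1" entry, so the resulting IN filter effectively matches nothing rather than everything. A rough illustration of the cases, with hypothetical field and value arguments:

    // Illustration only; "host" and the host names are placeholder values.
    converter.addInFiltersIfNotNullAndEnabled(query, null, "host", true);           // value null -> no filter added
    converter.addInFiltersIfNotNullAndEnabled(query, "", "host", true);             // empty value -> IN filter on ["-1"], matches nothing
    converter.addInFiltersIfNotNullAndEnabled(query, "c6401,c6402", "host", true);  // IN filter on ["c6401", "c6402"]
    converter.addInFiltersIfNotNullAndEnabled(query, "c6401", "host", false);       // condition false -> no filter added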

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/AbstractServiceLogRequestFacetQueryConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/AbstractServiceLogRequestFacetQueryConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/AbstractServiceLogRequestFacetQueryConverter.java
new file mode 100644
index 0000000..0fc12e8
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/AbstractServiceLogRequestFacetQueryConverter.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.converter;
+
+import org.apache.ambari.logsearch.model.request.impl.BaseServiceLogRequest;
+import org.apache.ambari.logsearch.util.SolrUtil;
+import org.apache.commons.lang.StringUtils;
+import org.springframework.data.solr.core.query.SimpleFacetQuery;
+
+import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.BUNDLE_ID;
+import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.COMPONENT;
+import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.HOST;
+import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LEVEL;
+import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.PATH;
+
+public abstract class AbstractServiceLogRequestFacetQueryConverter<SOURCE extends BaseServiceLogRequest>
+  extends AbstractLogRequestFacetQueryConverter<SOURCE> {
+
+  @Override
+  public void appendFacetQuery(SimpleFacetQuery facetQuery, SOURCE request) {
+    addEqualsFilterQuery(facetQuery, HOST, SolrUtil.escapeQueryChars(request.getHostName()));
+    addEqualsFilterQuery(facetQuery, PATH, SolrUtil.escapeQueryChars(request.getFileName()));
+    addEqualsFilterQuery(facetQuery, COMPONENT, SolrUtil.escapeQueryChars(request.getComponentName()));
+    addEqualsFilterQuery(facetQuery, BUNDLE_ID, request.getBundleId());
+    addInFiltersIfNotNullAndEnabled(facetQuery, request.getLevel(), LEVEL, true);
+    addInFiltersIfNotNullAndEnabled(facetQuery, request.getHostList(), HOST, StringUtils.isEmpty(request.getHostName()));
+  }
+}
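
Each addEqualsFilterQuery/addInFiltersIfNotNullAndEnabled call above contributes at most one Solr filter query, and the host list is only consulted when no single hostName is set. For a hypothetical request with componentName=my_component, level=WARN,ERROR, hostList=c6401,c6402 and an empty hostName, the facet query would roughly gain filters of the form:

    fq=type:my_component
    fq=level:(WARN ERROR)
    fq=host:(c6401 c6402)

(The field names type, level and host are the Solr fields behind the COMPONENT, LEVEL and HOST constants, as the updated converter tests further below also show.)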

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverter.java
index 2601f72..a1f6e75 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverter.java
@@ -53,6 +53,7 @@ public class BaseServiceLogRequestQueryConverter extends AbstractServiceLogReque
     if (CollectionUtils.isNotEmpty(levels)){
       addInFilterQuery(query, LEVEL, levels);
     }
+    addInFiltersIfNotNullAndEnabled(query, request.getHostList(), HOST, org.apache.commons.lang.StringUtils.isEmpty(request.getHostName()));
     addRangeFilter(query, LOGTIME, request.getFrom(), request.getTo());
     return query;
   }
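
The same guard appears in the row query: the host list only becomes a filter when no single hostName is present, so an explicit host selection always takes precedence. Hypothetical illustration:

    // hostName set -> hostList ignored; hostName empty -> hostList becomes an IN filter on host
    request.setHostName("c6401.example.com");
    request.setHostList("c6402.example.com,c6403.example.com");   // no host IN filter will be added for this request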

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogAnyGraphRequestQueryConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogAnyGraphRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogAnyGraphRequestQueryConverter.java
index 1d2a657..57bac10 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogAnyGraphRequestQueryConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogAnyGraphRequestQueryConverter.java
@@ -20,12 +20,20 @@ package org.apache.ambari.logsearch.converter;
 
 import org.apache.ambari.logsearch.common.LogType;
 import org.apache.ambari.logsearch.model.request.impl.ServiceAnyGraphRequest;
+import org.apache.ambari.logsearch.model.request.impl.ServiceLogLevelCountRequest;
+import org.apache.commons.lang.StringUtils;
+import org.springframework.data.solr.core.query.Criteria;
 import org.springframework.data.solr.core.query.FacetOptions;
+import org.springframework.data.solr.core.query.SimpleFacetQuery;
+import org.springframework.data.solr.core.query.SimpleFilterQuery;
 
+import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.HOST;
 import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LOGTIME;
 import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LEVEL;
 
 import javax.inject.Named;
+import java.util.Arrays;
+import java.util.List;
 
 @Named
 public class ServiceLogAnyGraphRequestQueryConverter extends AbstractLogRequestFacetQueryConverter<ServiceAnyGraphRequest>{
@@ -49,4 +57,9 @@ public class ServiceLogAnyGraphRequestQueryConverter extends AbstractLogRequestF
   public LogType getLogType() {
     return LogType.SERVICE;
   }
+
+  @Override
+  public void appendFacetQuery(SimpleFacetQuery facetQuery, ServiceAnyGraphRequest request) {
+    addInFiltersIfNotNullAndEnabled(facetQuery, request.getHostList(), HOST, StringUtils.isEmpty(request.getHostName()));
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogComponentLevelRequestQueryConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogComponentLevelRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogComponentLevelRequestQueryConverter.java
index cefc029..3b4bb78 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogComponentLevelRequestQueryConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogComponentLevelRequestQueryConverter.java
@@ -18,23 +18,18 @@
  */
 package org.apache.ambari.logsearch.converter;
 
-import com.google.common.base.Splitter;
 import org.apache.ambari.logsearch.common.LogType;
 import org.apache.ambari.logsearch.model.request.impl.ServiceLogComponentLevelRequest;
-import org.springframework.data.solr.core.query.Criteria;
 import org.springframework.data.solr.core.query.FacetOptions;
-import org.springframework.data.solr.core.query.SimpleFacetQuery;
-import org.springframework.data.solr.core.query.SimpleFilterQuery;
 
 import javax.inject.Named;
-import java.util.List;
 
 import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.COMPONENT;
 import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LEVEL;
 import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LOGTIME;
 
 @Named
-public class ServiceLogComponentLevelRequestQueryConverter extends AbstractLogRequestFacetQueryConverter<ServiceLogComponentLevelRequest> {
+public class ServiceLogComponentLevelRequestQueryConverter extends AbstractServiceLogRequestFacetQueryConverter<ServiceLogComponentLevelRequest> {
 
   @Override
   public FacetOptions.FacetSort getFacetSort() {
@@ -47,14 +42,6 @@ public class ServiceLogComponentLevelRequestQueryConverter extends AbstractLogRe
   }
 
   @Override
-  public void appendFacetQuery(SimpleFacetQuery facetQuery, ServiceLogComponentLevelRequest request) {
-    List<String> levels = Splitter.on(",").splitToList(request.getLevel()); // TODO: add @Valid on request object to make sure not throw exception if levels are missing
-    SimpleFilterQuery filterQuery = new SimpleFilterQuery();
-    filterQuery.addCriteria(new Criteria(LEVEL).in(levels));
-    facetQuery.addFilterQuery(filterQuery);
-  }
-
-  @Override
   public void appendFacetOptions(FacetOptions facetOptions, ServiceLogComponentLevelRequest request) {
     facetOptions.addFacetOnPivot(COMPONENT, LEVEL);
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogComponentRequestFacetQueryConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogComponentRequestFacetQueryConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogComponentRequestFacetQueryConverter.java
index afdac70..6271ca8 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogComponentRequestFacetQueryConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogComponentRequestFacetQueryConverter.java
@@ -18,16 +18,11 @@
  */
 package org.apache.ambari.logsearch.converter;
 
-import com.google.common.base.Splitter;
 import org.apache.ambari.logsearch.common.LogType;
 import org.apache.ambari.logsearch.model.request.impl.ServiceLogComponentHostRequest;
-import org.springframework.data.solr.core.query.Criteria;
 import org.springframework.data.solr.core.query.FacetOptions;
-import org.springframework.data.solr.core.query.SimpleFacetQuery;
-import org.springframework.data.solr.core.query.SimpleFilterQuery;
 
 import javax.inject.Named;
-import java.util.List;
 
 import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.COMPONENT;
 import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.HOST;
@@ -35,7 +30,7 @@ import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants
 import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LOGTIME;
 
 @Named
-public class ServiceLogComponentRequestFacetQueryConverter extends AbstractLogRequestFacetQueryConverter<ServiceLogComponentHostRequest> {
+public class ServiceLogComponentRequestFacetQueryConverter extends AbstractServiceLogRequestFacetQueryConverter<ServiceLogComponentHostRequest> {
 
   @Override
   public FacetOptions.FacetSort getFacetSort() {
@@ -53,14 +48,6 @@ public class ServiceLogComponentRequestFacetQueryConverter extends AbstractLogRe
   }
 
   @Override
-  public void appendFacetQuery(SimpleFacetQuery facetQuery, ServiceLogComponentHostRequest request) {
-    List<String> levels = Splitter.on(",").splitToList(request.getLevel());
-    SimpleFilterQuery filterQuery = new SimpleFilterQuery();
-    filterQuery.addCriteria(new Criteria(LEVEL).in(levels));
-    facetQuery.addFilterQuery(filterQuery);
-  }
-
-  @Override
   public void appendFacetOptions(FacetOptions facetOptions, ServiceLogComponentHostRequest request) {
     facetOptions.addFacetOnPivot(COMPONENT, HOST, LEVEL);
     facetOptions.addFacetOnPivot(COMPONENT, LEVEL);

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogLevelCountRequestQueryConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogLevelCountRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogLevelCountRequestQueryConverter.java
index 18fd204..982d2a1 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogLevelCountRequestQueryConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogLevelCountRequestQueryConverter.java
@@ -20,12 +20,19 @@ package org.apache.ambari.logsearch.converter;
 
 import org.apache.ambari.logsearch.common.LogType;
 import org.apache.ambari.logsearch.model.request.impl.ServiceLogLevelCountRequest;
+import org.apache.ambari.logsearch.util.SolrUtil;
+import org.apache.commons.lang.StringUtils;
 import org.springframework.data.solr.core.query.FacetOptions;
+import org.springframework.data.solr.core.query.SimpleFacetQuery;
 
 import javax.inject.Named;
 
+import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.BUNDLE_ID;
+import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.COMPONENT;
+import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.HOST;
 import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LOGTIME;
 import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LEVEL;
+import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.PATH;
 
 @Named
 public class ServiceLogLevelCountRequestQueryConverter extends AbstractLogRequestFacetQueryConverter<ServiceLogLevelCountRequest> {
@@ -49,4 +56,13 @@ public class ServiceLogLevelCountRequestQueryConverter extends AbstractLogReques
   public LogType getLogType() {
     return LogType.SERVICE;
   }
+
+  @Override
+  public void appendFacetQuery(SimpleFacetQuery facetQuery, ServiceLogLevelCountRequest request) {
+    addEqualsFilterQuery(facetQuery, HOST, SolrUtil.escapeQueryChars(request.getHostName()));
+    addEqualsFilterQuery(facetQuery, PATH, SolrUtil.escapeQueryChars(request.getFileName()));
+    addEqualsFilterQuery(facetQuery, COMPONENT, SolrUtil.escapeQueryChars(request.getComponentName()));
+    addEqualsFilterQuery(facetQuery, BUNDLE_ID, request.getBundleId());
+    addInFiltersIfNotNullAndEnabled(facetQuery, request.getHostList(), HOST, StringUtils.isEmpty(request.getHostName()));
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogLevelDateRangeRequestQueryConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogLevelDateRangeRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogLevelDateRangeRequestQueryConverter.java
index 2ff528d..b91a67d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogLevelDateRangeRequestQueryConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogLevelDateRangeRequestQueryConverter.java
@@ -26,8 +26,10 @@ import org.apache.solr.client.solrj.SolrQuery;
 
 import javax.inject.Named;
 
+import java.util.Arrays;
 import java.util.List;
 
+import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.HOST;
 import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LEVEL;
 import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LOGTIME;
 
@@ -49,7 +51,19 @@ public class ServiceLogLevelDateRangeRequestQueryConverter extends AbstractDateR
     SolrQuery solrQuery = super.convert(request);
     if (StringUtils.isNotEmpty(request.getLevel())) {
       List<String> levels = Splitter.on(",").splitToList(request.getLevel());
-      solrQuery.addFilterQuery(String.format("%s:(%s)", LEVEL, StringUtils.join(levels, " OR ")));
+      if (levels.size() > 1) {
+        solrQuery.addFilterQuery(String.format("%s:(%s)", LEVEL, StringUtils.join(levels, " OR ")));
+      } else {
+        solrQuery.addFilterQuery(String.format("%s:%s", LEVEL, levels.get(0)));
+      }
+    }
+    if (request.getHostList() != null && StringUtils.isEmpty(request.getHostName())) {
+      List<String> hosts = request.getHostList().length() == 0 ? Arrays.asList("\\-1") : splitValueAsList(request.getHostList(), ",");
+      if (hosts.size() > 1) {
+        solrQuery.addFilterQuery(String.format("%s:(%s)", HOST, StringUtils.join(hosts, " OR ")));
+      } else {
+        solrQuery.addFilterQuery(String.format("%s:%s", HOST, hosts.get(0)));
+      }
     }
     return solrQuery;
   }
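
Because this converter assembles a raw SolrQuery rather than a Spring Data Criteria, the filter strings are formatted by hand: multi-value lists become an OR group, a single value is emitted bare, and an empty-but-present host list turns into the escaped placeholder \-1 so nothing matches. With hypothetical inputs level=WARN,ERROR and hostList=c6401 (and no hostName), the added filters would be roughly:

    fq=level:(WARN OR ERROR)
    fq=host:c6401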

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogTreeRequestFacetQueryConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogTreeRequestFacetQueryConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogTreeRequestFacetQueryConverter.java
index f9cdb0b..7cb8f91 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogTreeRequestFacetQueryConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/ServiceLogTreeRequestFacetQueryConverter.java
@@ -18,25 +18,18 @@
  */
 package org.apache.ambari.logsearch.converter;
 
-import com.google.common.base.Splitter;
 import org.apache.ambari.logsearch.common.LogType;
 import org.apache.ambari.logsearch.model.request.impl.ServiceLogHostComponentRequest;
-import org.springframework.data.solr.core.query.Criteria;
 import org.springframework.data.solr.core.query.FacetOptions;
-import org.springframework.data.solr.core.query.SimpleFacetQuery;
-import org.springframework.data.solr.core.query.SimpleFilterQuery;
-
 import javax.inject.Named;
 
-import java.util.List;
-
 import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.COMPONENT;
 import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.HOST;
 import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LEVEL;
 import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.LOGTIME;
 
 @Named
-public class ServiceLogTreeRequestFacetQueryConverter extends AbstractLogRequestFacetQueryConverter<ServiceLogHostComponentRequest>{
+public class ServiceLogTreeRequestFacetQueryConverter extends AbstractServiceLogRequestFacetQueryConverter<ServiceLogHostComponentRequest>{
 
   @Override
   public FacetOptions.FacetSort getFacetSort() {
@@ -54,14 +47,6 @@ public class ServiceLogTreeRequestFacetQueryConverter extends AbstractLogRequest
   }
 
   @Override
-  public void appendFacetQuery(SimpleFacetQuery facetQuery, ServiceLogHostComponentRequest request) {
-    List<String> levels = Splitter.on(",").splitToList(request.getLevel());
-    SimpleFilterQuery filterQuery = new SimpleFilterQuery();
-    filterQuery.addCriteria(new Criteria(LEVEL).in(levels));
-    facetQuery.addFilterQuery(filterQuery);
-  }
-
-  @Override
   public void appendFacetOptions(FacetOptions facetOptions, ServiceLogHostComponentRequest request) {
     facetOptions.addFacetOnPivot(HOST, COMPONENT, LEVEL);
     facetOptions.addFacetOnPivot(HOST, LEVEL);

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
index 3639b88..805c5a0 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
@@ -71,6 +71,7 @@ public class DocConstants {
     public static final String NUMBER_ROWS_D = "Getting rows after particular log entry - used in 'Preview' option";
     public static final String SCROLL_TYPE_D = "Used in 'Preview' feature for getting records 'after' or 'before'";
     public static final String UTC_OFFSET_D = "timezone offset";
+    public static final String HOST_PARAMS_D = "filter for hosts";
   }
 
   public class ServiceOperationDescriptions {

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
index 741c523..75ac819 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
@@ -172,16 +172,16 @@ public class GraphDataGenerator {
   public BarGraphDataListResponse getGraphDataWithDefaults(QueryResponse queryResponse, String field, String[] defaults) {
     BarGraphDataListResponse response = new BarGraphDataListResponse();
     BarGraphData barGraphData = new BarGraphData();
-    List<NameValueData> nameValues = generateLevelCountData(queryResponse, defaults);
+    List<NameValueData> nameValues = generateLevelCountData(queryResponse, defaults, true);
     barGraphData.setName(field);
     barGraphData.setDataCount(nameValues);
     response.setGraphData(Lists.newArrayList(barGraphData));
     return response;
   }
 
-  public NameValueDataListResponse getNameValueDataListResponseWithDefaults(QueryResponse response, String[] defaults) {
+  public NameValueDataListResponse getNameValueDataListResponseWithDefaults(QueryResponse response, String[] defaults, boolean emptyResponseDisabled) {
     NameValueDataListResponse result = new NameValueDataListResponse();
-    result.setvNameValues(generateLevelCountData(response, defaults));
+    result.setvNameValues(generateLevelCountData(response, defaults, emptyResponseDisabled));
     return result;
   }
 
@@ -325,11 +325,11 @@ public class GraphDataGenerator {
     return extensionTree;
   }
 
-  private List<NameValueData> generateLevelCountData(QueryResponse queryResponse, String[] defaults) {
+  private List<NameValueData> generateLevelCountData(QueryResponse queryResponse, String[] defaults, boolean emptyResponseEnabled) {
     List<NameValueData> nameValues = Lists.newLinkedList();
     Map<String, NameValueData> linkedMap = Maps.newLinkedHashMap();
     List<Count> counts = generateCount(queryResponse);
-    if (!CollectionUtils.isNotEmpty(counts)) {
+    if (!CollectionUtils.isNotEmpty(counts) && emptyResponseEnabled) {
       return nameValues;
     }
     for (String defaultValue : defaults) {
@@ -338,13 +338,15 @@ public class GraphDataGenerator {
       nameValue.setValue("0");
       linkedMap.put(defaultValue, nameValue);
     }
-    for (Count count : counts) {
-      if (!linkedMap.containsKey(count.getName())) {
-        NameValueData nameValue = new NameValueData();
-        String name = count.getName().toUpperCase();
-        nameValue.setName(name);
-        nameValue.setValue(String.valueOf(count.getCount()));
-        linkedMap.put(name, nameValue);
+    if (CollectionUtils.isNotEmpty(counts)) {
+      for (Count count : counts) {
+        if (!linkedMap.containsKey(count.getName())) {
+          NameValueData nameValue = new NameValueData();
+          String name = count.getName().toUpperCase();
+          nameValue.setName(name);
+          nameValue.setValue(String.valueOf(count.getCount()));
+          linkedMap.put(name, nameValue);
+        }
       }
     }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
index 8986544..fc2f0f7 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
@@ -209,7 +209,7 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
   public NameValueDataListResponse getLogsLevelCount(ServiceLogLevelCountRequest request) {
     SimpleFacetQuery facetQuery = conversionService.convert(request, SimpleFacetQuery.class);
     QueryResponse response = serviceLogsSolrDao.process(facetQuery, "/service/logs/levels/counts");
-    return graphDataGenerator.getNameValueDataListResponseWithDefaults(response, LogSearchConstants.SUPPORTED_LOG_LEVELS);
+    return graphDataGenerator.getNameValueDataListResponseWithDefaults(response, LogSearchConstants.SUPPORTED_LOG_LEVELS, false);
   }
 
   public BarGraphDataListResponse getHistogramData(ServiceGraphRequest request) {
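
The new boolean threads the same change through to the REST layer: getLogsLevelCount now passes false, so an empty Solr facet result is returned as the full set of default levels with zero counts, while getGraphDataWithDefaults keeps the previous short-circuit by passing true. A small illustration, assuming an empty QueryResponse and example default values:

    String[] defaults = {"FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"};   // example values only
    // third argument true  -> an empty Solr result yields an empty list (previous behaviour)
    // third argument false -> an empty Solr result yields FATAL=0, ERROR=0, ... for every default
    NameValueDataListResponse counts =
        graphDataGenerator.getNameValueDataListResponseWithDefaults(emptyQueryResponse, defaults, false);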

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogParamDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogParamDefinition.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogParamDefinition.java
index 1783a8d..752f786 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogParamDefinition.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogParamDefinition.java
@@ -21,6 +21,7 @@ package org.apache.ambari.logsearch.model.request;
 import io.swagger.annotations.ApiParam;
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.HOST_PARAMS_D;
 import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.LEVEL_D;
 import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.HOST_NAME_D;
 import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.COMPONENT_NAME_D;
@@ -47,4 +48,9 @@ public interface ServiceLogParamDefinition {
 
   @ApiParam(value = FILE_NAME_D, name = LogSearchConstants.REQUEST_PARAM_FILE_NAME)
   void setFileName(String fileName);
+
+  String getHostList();
+
+  @ApiParam(value = HOST_PARAMS_D, name = LogSearchConstants.REQUEST_PARAM_HOSTS)
+  void setHostList(String hostList);
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseServiceLogRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseServiceLogRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseServiceLogRequest.java
index edd7563..bafca37 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseServiceLogRequest.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseServiceLogRequest.java
@@ -43,6 +43,9 @@ public class BaseServiceLogRequest extends BaseLogRequest
   @QueryParam(LogSearchConstants.REQUEST_PARAM_BUNDLE_ID)
   private String bundleId;
 
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_HOSTS)
+  private String hostList;
+
   @Override
   public String getLevel() {
     return level;
@@ -92,4 +95,14 @@ public class BaseServiceLogRequest extends BaseLogRequest
   public void setBundleId(String bundleId) {
     this.bundleId = bundleId;
   }
+
+  @Override
+  public String getHostList() {
+    return hostList;
+  }
+
+  @Override
+  public void setHostList(String hostList) {
+    this.hostList = hostList;
+  }
 }
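
On the request side the new field is an ordinary JAX-RS query parameter, so the host list arrives as a single comma-separated string and is only split later by the converters. A hedged sketch (the URL and the direct use of the base class are illustrative only):

    // e.g. ...?hostList=c6401.example.com,c6402.example.com&level=WARN,ERROR
    BaseServiceLogRequest request = new BaseServiceLogRequest();
    request.setHostList("c6401.example.com,c6402.example.com");   // same string the container would inject
    request.setLevel("WARN,ERROR");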

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/BubbleGraphTableLayoutView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/BubbleGraphTableLayoutView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/BubbleGraphTableLayoutView.js
index 988c593..e144ae9 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/BubbleGraphTableLayoutView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/BubbleGraphTableLayoutView.js
@@ -160,7 +160,7 @@ define(['require',
 						host_name :  host,
 						component_name : component,
 						sourceLogId: id
-					},that.graphParams,{treeParams:null}),
+					},that.graphParams,{hostList:null}),
 					globalVent : that.globalVent
 				});
 			}
@@ -619,7 +619,7 @@ define(['require',
 //							iMessage : that.collection.queryParams.iMessage,
 //							eMessage : that.collection.queryParams.eMessage,
 //							query : that.collection.queryParams.query
-						},that.collection.queryParams,{treeParams:null}),
+						},that.collection.queryParams,{hostList:null}),
 						globalVent : that.globalVent
 					}/*)*/);
 				} else {

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/ComponentListView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/ComponentListView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/ComponentListView.js
index 029c25d..f3bf985 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/ComponentListView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/ComponentListView.js
@@ -191,7 +191,7 @@ define(['require',
 						params:_.extend({},{
 							host_name :  host,
 							component_name : component
-						},that.searchParams,{treeParams:null}),
+            }, that.searchParams, {hostList: null}),
 						globalVent : that.globalVent
 					});
 				},

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/HostListView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/HostListView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/HostListView.js
index b8c11d8..4283f02 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/HostListView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/HostListView.js
@@ -109,7 +109,7 @@ define(['require',
 		},
 		fetchHosts : function(params){
 			var that = this;
-			$.extend(this.collection.queryParams,params,{treeParams:null});
+      $.extend(this.collection.queryParams, params, {hostList: null});
 			this.collection.fetch({
 				reset:true,
 				complete : function(){
@@ -250,7 +250,7 @@ define(['require',
 						params:_.extend({},{
 							host_name :  host,
 							component_name : component
-						},that.searchParams,{treeParams:null}),
+						},that.searchParams,{hostList:null}),
 						globalVent : that.globalVent
 					});
 				}
@@ -278,8 +278,8 @@ define(['require',
 					mainParent.find("input[data-type='H']").prop("indeterminate",false);
 				
 			}
-			var data = this.getCheckedHierarchyData();
-			this.vent.trigger("tree:search",{treeParams : JSON.stringify(_.pluck(data,"h"))});
+      var data = this.getCheckedHierarchyData();
+      this.vent.trigger("tree:search", {hostList: (_.pluck(data, "h")).toString()});
 		},
 		getCheckedHierarchyData : function(){
 			var data=[];
@@ -303,8 +303,8 @@ define(['require',
 			}else
 				this.$('.tree  input[type="checkbox"]').prop({"checked":false,"indeterminate":false});
 			var data = this.getCheckedHierarchyData();
-			this.params.treeParams = _.extend({},data);
-			this.vent.trigger("tree:search",{treeParams : JSON.stringify(_.pluck(data,"h"))});
+			this.params.hostList = _.extend({},data);
+			this.vent.trigger("tree:search",{hostList : (_.pluck(data,"h")).toString()});
 			
 		},
 		onSearchHostClick : function(e){
@@ -329,7 +329,7 @@ define(['require',
 			this.ui.searcHostBtn.find("i").removeClass().addClass("fa fa-search");
 		},
 		restoreCheckbox : function(){
-			var params = (this.params.treeParams) ? JSON.parse(this.params.treeParams) : undefined,that=this;
+      var params = (this.params.hostList) ? this.params.hostList.toString() : undefined, that = this;
 			if(params){
 				that.$("input[data-node]").prop("checked",false);
 				_.each(params,function(node){

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/TreeView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/TreeView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/TreeView.js
index c395495..ee12e8b 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/TreeView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/TreeView.js
@@ -151,7 +151,7 @@ define(['require',
 			this.restoreCheckbox();
 		},
 		restoreCheckbox : function(){
-			var params = (this.params.treeParams) ? JSON.parse(this.params.treeParams) : undefined,that=this;
+			var params = (this.params.hostList) ? JSON.parse(this.params.hostList) : undefined,that=this;
 			if(params){
 				that.$("input[data-node]").prop("checked",false);
 				_.each(params,function(node){
@@ -274,8 +274,8 @@ define(['require',
 			}else
 				this.$('.tree  input[type="checkbox"]').prop({"checked":false,"indeterminate":false});
 			var data = this.getCheckedHierarchyData();
-			this.params.treeParams = _.extend({},data);
-			this.vent.trigger("tree:search",{treeParams : JSON.stringify(data)});
+			this.params.hostList = _.extend({},data);
+			this.vent.trigger("tree:search",{hostList : data.toString()});
 			
 		},
 		onChangeNodeCheckbox : function(e){
@@ -300,7 +300,7 @@ define(['require',
 				
 			}
 			var data = this.getCheckedHierarchyData();
-			this.vent.trigger("tree:search",{treeParams : JSON.stringify(data)});
+			this.vent.trigger("tree:search",{hostList : data.toString()});
 		},
 		onNewTabIconClick : function(e){
 			var $el = $(e.currentTarget),host,component,that=this;
@@ -311,7 +311,7 @@ define(['require',
 					params:_.extend({},{
 						host_name :  host,
 						component_name : component
-					},that.searchParams,{treeParams:null}),
+					},that.searchParams,{hostList:null}),
 					globalVent : that.globalVent
 				}/*)*/);
 			}

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/tabs/TreeView_tmpl.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/tabs/TreeView_tmpl.html b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/tabs/TreeView_tmpl.html
index f9a6343..3420ca2 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/tabs/TreeView_tmpl.html
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/tabs/TreeView_tmpl.html
@@ -15,83 +15,40 @@
   limitations under the License.
 -->
 <div class="row">
-	<div class="col-md-12">
-		<div class="box">
-			<div class="box-header">
-				<div class="box-name">
-					<span>Tree View</span>
-				</div>
-				<div class="box-icons">
-					<!-- a class="collapse-link"> <i class="fa fa-save"></i>
-					</a> <a class="collapse-link"> <i class="fa fa-info"></i>
-					</a> <a class="expand-link"> <i class="fa fa-gear"></i>
-					</a--> <a class="collapse-link"> <i class="fa fa-chevron-up"></i>
-					</a> <!-- a class="close-link"> <i class="fa fa-close"></i>
-					</a-->
-				</div>
-				<div class="no-move"></div>
-			</div>
-			<div class="box-content">
-				<!-- div class="side-panel">
-					<!-- button title="Search by name" class="panel-trigger"><i class="fa fa-search"></i></button-->
-					<div class="clearfix row-margin-bottom">
-						<label class="checkbox pull-right no-margin small-85"> <input id="mainCheck" checked ="checked"
-							type="checkbox"> Select / Deselect All <i
-							class="fa fa-square-o small"></i>
-						</label>
-					</div>
-				    <div class="clearfix">
-				        <div class="input-group">
-				            <input type="text" id="searchNode" class="form-control" placeholder="Search">
-				            <span class="input-group-btn">
-				                <button class="btn btn-info btn-search" id="prevSrch" type="button"><i class="fa fa-chevron-left"></i></button>
-				                <button class="btn btn-info btn-search" id="nextSrch" type="button"><i class="fa fa-chevron-right"></i></button>
-				            </span>
-				        </div>
-				    </div>
-				<!-- /div-->
-				
-				<div class="tree smart-form">
-				<ul></ul>
-					<!-- ul>
-						<li><span><i class="fa fa-minus-circle"></i> Parent</span>
-							<ul>
-								<li><span><i class="fa fa-plus-circle"></i>
-										Administrators</span>
-									<ul>
-										<li style="display: none"><span>Michael Jackson</span></li>
-										<li style="display: none"><span>Eminem</span></li>
-										<li style="display: none"><span>Enrique Iglesias</span></li>
-									</ul></li>
-								<li><span><i class="fa fa-minus-circle"></i> Child</span>
-									<ul>
-										<li><span>Grand Child</span></li>
-										<li><span>Grand Child</span></li>
-										<li><span><i class="fa fa-plus-circle"></i> Grand
-												Child</span>
-											<ul>
-												<li style="display: none"><span>Great Grand
-														Child</span>
-													<ul>
-														<li style="display: none"><span>Great great
-																Grand Child</span></li>
-														<li style="display: none"><span>Great great
-																Grand Child</span></li>
-													</ul></li>
-												<li style="display: none"><span>Great Grand
-														Child</span></li>
-												<li style="display: none"><span>Great Grand
-														Child</span></li>
-											</ul></li>
-									</ul></li>
-							</ul></li>
-						<li><span><i class="fa fa-minus-circle"></i> Parent2</span>
-							<ul>
-								<li><span>Child</span></li>
-							</ul></li>
-					</ul-->
-				</div>
-			</div>
-		</div>
-	</div>
+    <div class="col-md-12">
+        <div class="box">
+            <div class="box-header">
+                <div class="box-name">
+                    <span>Tree View</span>
+                </div>
+                <div class="box-icons">
+                    <a class="collapse-link"><i class="fa fa-chevron-up"></i>
+                    </a>
+                </div>
+                <div class="no-move"></div>
+            </div>
+            <div class="box-content">
+                <div class="clearfix row-margin-bottom">
+                    <label class="checkbox pull-right no-margin small-85">
+                        <input id="mainCheck" checked="checked" type="checkbox"> Select / Deselect All
+                        <i class="fa fa-square-o small"></i>
+                    </label>
+                </div>
+                <div class="clearfix">
+                    <div class="input-group">
+                        <input type="text" id="searchNode" class="form-control" placeholder="Search">
+            <span class="input-group-btn">
+                <button class="btn btn-info btn-search" id="prevSrch" type="button"><i class="fa fa-chevron-left"></i>
+                </button>
+               <button class="btn btn-info btn-search" id="nextSrch" type="button"><i class="fa fa-chevron-right"></i>
+               </button>
+             </span>
+                    </div>
+                </div>
+                <div class="tree smart-form">
+                    <ul></ul>
+                </div>
+            </div>
+        </div>
+    </div>
 </div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogComponentLevelRequestQueryConverterTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogComponentLevelRequestQueryConverterTest.java b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogComponentLevelRequestQueryConverterTest.java
index b1ae332..4378a64 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogComponentLevelRequestQueryConverterTest.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogComponentLevelRequestQueryConverterTest.java
@@ -45,9 +45,10 @@ public class ServiceLogComponentLevelRequestQueryConverterTest extends AbstractR
     // WHEN
     SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(underTest.convert(request));
     // THEN
-    assertEquals("?q=*%3A*&rows=0&fq=logtime%3A%5B2016-09-13T22%3A00%3A01.000Z+TO+2016-09-14T22%3A00%3A01.000Z%5D&fq=log_message%3Amyincludemessage" +
-      "&fq=-log_message%3Amyexcludemessage&fq=type%3A%28logsearch_app+secure_log%29&fq=-type%3A%28hst_agent+system_message%29" +
-      "&fq=level%3A%28WARN+ERROR+FATAL%29&facet=true&facet.mincount=1&facet.limit=-1&facet.sort=index&facet.pivot=type%2Clevel",
+    assertEquals("?q=*%3A*&rows=0&fq=logtime%3A%5B2016-09-13T22%3A00%3A01.000Z+TO+2016-09-14T22%3A00%3A01.000Z%5D" +
+      "&fq=log_message%3Amyincludemessage&fq=-log_message%3Amyexcludemessage&fq=type%3A%28logsearch_app+secure_log%29" +
+      "&fq=-type%3A%28hst_agent+system_message%29&fq=type%3Amycomponent&fq=level%3A%28WARN+ERROR+FATAL%29&facet=true" +
+      "&facet.mincount=1&facet.limit=-1&facet.sort=index&facet.pivot=type%2Clevel",
       query.toQueryString());
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/2591ba11/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogComponentRequestFacetQueryConverterTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogComponentRequestFacetQueryConverterTest.java b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogComponentRequestFacetQueryConverterTest.java
index 3c7f242..ce9cdcc 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogComponentRequestFacetQueryConverterTest.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/converter/ServiceLogComponentRequestFacetQueryConverterTest.java
@@ -47,8 +47,8 @@ public class ServiceLogComponentRequestFacetQueryConverterTest extends AbstractR
     // THEN
     assertEquals("?q=*%3A*&rows=0&fq=logtime%3A%5B2016-09-13T22%3A00%3A01.000Z+TO+2016-09-14T22%3A00%3A01.000Z%5D" +
       "&fq=log_message%3Amyincludemessage&fq=-log_message%3Amyexcludemessage&fq=type%3A%28logsearch_app+secure_log%29" +
-      "&fq=-type%3A%28hst_agent+system_message%29&fq=level%3A%28WARN+ERROR+FATAL%29&facet=true&facet.mincount=1&facet.limit=-1" +
-      "&facet.sort=index&facet.pivot=type%2Chost%2Clevel&facet.pivot=type%2Clevel",
+      "&fq=-type%3A%28hst_agent+system_message%29&fq=type%3Amycomponent&fq=level%3A%28WARN+ERROR+FATAL%29" +
+      "&facet=true&facet.mincount=1&facet.limit=-1&facet.sort=index&facet.pivot=type%2Chost%2Clevel&facet.pivot=type%2Clevel",
       query.toQueryString());
   }
 


[33/50] ambari git commit: AMBARI-18997 ambari-server.pid might not be created on slow hardware (dsen)

Posted by sw...@apache.org.
AMBARI-18997 ambari-server.pid might not be created on slow hardware (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/67292971
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/67292971
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/67292971

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 67292971c4b87246bb9f1df96939e89073af6dbc
Parents: fbff7f7
Author: Dmytro Sen <ds...@apache.org>
Authored: Wed Nov 30 14:04:51 2016 +0200
Committer: Dmytro Sen <ds...@apache.org>
Committed: Wed Nov 30 14:04:51 2016 +0200

----------------------------------------------------------------------
 .../src/main/python/ambari_server/utils.py      | 24 ++++-------
 .../src/main/python/ambari_server_main.py       | 44 ++++++++++++--------
 .../src/test/python/TestAmbariServer.py         |  4 +-
 ambari-server/src/test/python/TestUtils.py      | 26 +-----------
 4 files changed, 36 insertions(+), 62 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/67292971/ambari-server/src/main/python/ambari_server/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/utils.py b/ambari-server/src/main/python/ambari_server/utils.py
index f505444..62c93ae 100644
--- a/ambari-server/src/main/python/ambari_server/utils.py
+++ b/ambari-server/src/main/python/ambari_server/utils.py
@@ -117,19 +117,19 @@ def save_pid(pid, pidfile):
       pass
 
 
-def save_main_pid_ex(pids, pidfile, exclude_list=[], kill_exclude_list=False, skip_daemonize=False):
+def save_main_pid_ex(pids, pidfile, exclude_list=[], skip_daemonize=False):
   """
     Save pids that are not included in exclude_list to pidfile.
-    If kill_exclude_list is set to true,  all processes in that
-    list would be killed. It's might be useful to daemonize child process
 
     exclude_list contains list of full executable paths which should be excluded
   """
+  pid_saved = False
   try:
     pfile = open(pidfile, "w")
     for item in pids:
       if pid_exists(item["pid"]) and (item["exe"] not in exclude_list):
         pfile.write("%s\n" % item["pid"])
+        pid_saved = True
         logger.info("Ambari server started with PID " + str(item["pid"]))
       if pid_exists(item["pid"]) and (item["exe"] in exclude_list) and not skip_daemonize:
         try:
@@ -145,23 +145,13 @@ def save_main_pid_ex(pids, pidfile, exclude_list=[], kill_exclude_list=False, sk
     except Exception as e:
       logger.error("Failed to close PID file " + pidfile + " due to " + str(e))
       pass
+  return pid_saved
 
-
-def wait_for_pid(pids, timeout):
+def get_live_pids_count(pids):
   """
-    Check pid for existence during timeout
+    Check pids for existence
   """
-  tstart = time.time()
-  pid_live = 0
-  while int(time.time()-tstart) <= timeout and len(pids) > 0:
-    sys.stdout.write('.')
-    sys.stdout.flush()
-    pid_live = 0
-    for item in pids:
-      if pid_exists(item["pid"]):
-        pid_live += 1
-    time.sleep(1)
-  return pid_live
+  return len([pid for pid in pids if pid_exists(pid)])
 
 
 def get_symlink_path(path_to_link):

http://git-wip-us.apache.org/repos/asf/ambari/blob/67292971/ambari-server/src/main/python/ambari_server_main.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server_main.py b/ambari-server/src/main/python/ambari_server_main.py
index b35cfc9..572de4e 100644
--- a/ambari-server/src/main/python/ambari_server_main.py
+++ b/ambari-server/src/main/python/ambari_server_main.py
@@ -38,8 +38,8 @@ from ambari_server.serverUtils import refresh_stack_hash
 from ambari_server.setupHttps import get_fqdn
 from ambari_server.setupSecurity import generate_env, \
   ensure_can_start_under_current_user
-from ambari_server.utils import check_reverse_lookup, save_pid, locate_file, locate_all_file_paths, looking_for_pid, wait_for_pid, \
-  save_main_pid_ex, check_exitcode
+from ambari_server.utils import check_reverse_lookup, save_pid, locate_file, locate_all_file_paths, looking_for_pid, \
+  save_main_pid_ex, check_exitcode, get_live_pids_count
 from ambari_server.serverClassPath import ServerClassPath
 
 logger = logging.getLogger(__name__)
@@ -101,8 +101,8 @@ SERVER_START_CMD_DEBUG_WINDOWS = "{0} " \
     "-cp {3} " \
     "org.apache.ambari.server.controller.AmbariServer"
 
-SERVER_INIT_TIMEOUT = 5
-SERVER_START_TIMEOUT = 30
+SERVER_START_TIMEOUT = 5
+SERVER_START_RETRIES = 4
 
 SERVER_PING_TIMEOUT_WINDOWS = 5
 SERVER_PING_ATTEMPTS_WINDOWS = 4
@@ -114,6 +114,7 @@ EXITCODE_NAME = "ambari-server.exitcode"
 CHECK_DATABASE_SKIPPED_PROPERTY = "check_database_skipped"
 
 AMBARI_SERVER_DIE_MSG = "Ambari Server java process died with exitcode {0}. Check {1} for more information."
+AMBARI_SERVER_NOT_STARTED_MSG = "Ambari Server java process hasn't been started or can't be determined."
 
 # linux open-file limit
 ULIMIT_OPEN_FILES_KEY = 'ulimit.open.files'
@@ -200,12 +201,23 @@ def wait_for_server_start(pidFile, scmStatus):
   #wait for server process for SERVER_START_TIMEOUT seconds
   sys.stdout.write('Waiting for server start...')
   sys.stdout.flush()
-
-  pids = looking_for_pid(SERVER_SEARCH_PATTERN, SERVER_INIT_TIMEOUT)
-  found_pids = wait_for_pid(pids, SERVER_START_TIMEOUT)
-
-  sys.stdout.write('\n')
-  sys.stdout.flush()
+  pids = []
+  server_started = False
+  # looking_for_pid() might return a partial pid list on slow hardware
+  for i in range(1, SERVER_START_RETRIES):
+    pids = looking_for_pid(SERVER_SEARCH_PATTERN, SERVER_START_TIMEOUT)
+
+    sys.stdout.write('\n')
+    sys.stdout.flush()
+
+    if save_main_pid_ex(pids, pidFile, locate_all_file_paths('sh', '/bin') +
+                        locate_all_file_paths('bash', '/bin') +
+                        locate_all_file_paths('dash', '/bin'), IS_FOREGROUND):
+      server_started = True
+      break
+    else:
+      sys.stdout.write("Unable to determine server PID. Retrying...\n")
+      sys.stdout.flush()
 
   if 'Database consistency check: failed' in open(configDefaults.SERVER_OUT_FILE).read():
     print "DB configs consistency check failed. Run \"ambari-server start --skip-database-check\" to skip. " \
@@ -218,15 +230,13 @@ def wait_for_server_start(pidFile, scmStatus):
   else:
     print "DB configs consistency check: no errors and warnings were found."
 
-
-  if found_pids <= 0:
+  if server_started:
+    return
+  elif get_live_pids_count(pids) <= 0:
     exitcode = check_exitcode(os.path.join(configDefaults.PID_DIR, EXITCODE_NAME))
     raise FatalException(-1, AMBARI_SERVER_DIE_MSG.format(exitcode, configDefaults.SERVER_OUT_FILE))
   else:
-    save_main_pid_ex(pids, pidFile, locate_all_file_paths('sh', '/bin') +
-                                     locate_all_file_paths('bash', '/bin') +
-                                     locate_all_file_paths('dash', '/bin'), True, IS_FOREGROUND)
-
+    raise FatalException(-1, AMBARI_SERVER_NOT_STARTED_MSG)
 
 def server_process_main(options, scmStatus=None):
   properties = get_ambari_properties()
@@ -358,7 +368,7 @@ def server_process_main(options, scmStatus=None):
     raise FatalException(-1, AMBARI_SERVER_DIE_MSG.format(exitcode, configDefaults.SERVER_OUT_FILE))
   else:
     pidfile = os.path.join(configDefaults.PID_DIR, PID_NAME)
-    save_pid(pidJava, pidfile)
+
     print "Server PID at: "+pidfile
     print "Server out at: "+configDefaults.SERVER_OUT_FILE
     print "Server log at: "+configDefaults.SERVER_LOG_FILE

http://git-wip-us.apache.org/repos/asf/ambari/blob/67292971/ambari-server/src/test/python/TestAmbariServer.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestAmbariServer.py b/ambari-server/src/test/python/TestAmbariServer.py
index 424ddde..59fc975 100644
--- a/ambari-server/src/test/python/TestAmbariServer.py
+++ b/ambari-server/src/test/python/TestAmbariServer.py
@@ -4398,7 +4398,6 @@ class TestAmbariServer(TestCase):
   @patch("sys.stdout.flush")
   @patch("sys.stdout.write")
   @patch("ambari_server_main.looking_for_pid")
-  @patch("ambari_server_main.wait_for_pid")
   @patch("ambari_server_main.save_main_pid_ex")
   @patch("ambari_server_main.check_exitcode")
   @patch("os.makedirs")
@@ -4448,7 +4447,7 @@ class TestAmbariServer(TestCase):
                  save_master_key_method, get_master_key_location_method,
                  os_chown_mock, is_server_running_mock, locate_file_mock,
                  os_makedirs_mock, check_exitcode_mock, save_main_pid_ex_mock,
-                 wait_for_pid_mock, looking_for_pid_mock, stdout_write_mock, stdout_flush_mock,
+                 looking_for_pid_mock, stdout_write_mock, stdout_flush_mock,
                  get_is_active_instance_mock):
 
     def reset_mocks():
@@ -4485,7 +4484,6 @@ class TestAmbariServer(TestCase):
         "exe": "/test",
         "cmd": "test arg"
     }]
-    wait_for_pid_mock.return_value = 1
     check_exitcode_mock.return_value = 0
 
     p = Properties()

http://git-wip-us.apache.org/repos/asf/ambari/blob/67292971/ambari-server/src/test/python/TestUtils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestUtils.py b/ambari-server/src/test/python/TestUtils.py
index db94d92..3f2ccf8 100644
--- a/ambari-server/src/test/python/TestUtils.py
+++ b/ambari-server/src/test/python/TestUtils.py
@@ -120,30 +120,6 @@ class TestUtils(TestCase):
     normpath_mock.return_value = "test value"
     self.assertEquals(utils.get_symlink_path("/"), "test value")
 
-  @patch('time.time')
-  @patch.object(utils, 'pid_exists')
-  @patch('time.sleep')
-  def test_wait_for_pid(self, sleep_mock, pid_exists_mock, time_mock):
-    pid_exists_mock.return_value = True
-    time_mock.side_effect = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11]
-
-    out = StringIO.StringIO()
-    sys.stdout = out
-    live_pids = utils.wait_for_pid([
-                                   {"pid": "111",
-                                    "exe": "",
-                                    "cmd": ""
-                                    },
-                                   {"pid": "222",
-                                    "exe": "",
-                                    "cmd": ""
-                                    },
-                                   ], 10)
-    self.assertEqual("..........", out.getvalue())
-    sys.stdout = sys.__stdout__
-
-    self.assertEquals(2, live_pids)
-
   @patch.object(utils, 'pid_exists')
   @patch('__builtin__.open')
   @patch('os.kill')
@@ -170,7 +146,7 @@ class TestUtils(TestCase):
                              "exe": "/exe2",
                              "cmd": ""
                              },
-                            ], "/pidfile", ["/exe1"], True)
+                            ], "/pidfile", ["/exe1"])
     self.assertEquals(open_mock.call_count, 1)
     self.assertEquals(pid_exists_mock.call_count, 4)
     self.assertEquals(kill_mock.call_count, 1)


[43/50] ambari git commit: AMBARI-18994. In HA cluster copy the hdfs-site.xml and hbase-site.xml to Ranger conf directory (Mugdha Varadkar via smohanty)

Posted by sw...@apache.org.
AMBARI-18994. In HA cluster copy the hdfs-site.xml and hbase-site.xml to Ranger conf directory (Mugdha Varadkar via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/037ed632
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/037ed632
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/037ed632

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 037ed63264c21d723bbba6b0acd3724f0eed687b
Parents: 7632917
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Wed Nov 30 12:51:46 2016 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Wed Nov 30 12:51:46 2016 -0800

----------------------------------------------------------------------
 .../RANGER/0.4.0/package/scripts/params.py      | 14 +++++++++++++
 .../0.4.0/package/scripts/setup_ranger_xml.py   | 21 ++++++++++++++++++++
 .../common-services/RANGER/0.6.0/metainfo.xml   |  2 ++
 3 files changed, 37 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/037ed632/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
index 6debaf1..e27b363 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
@@ -385,3 +385,17 @@ if audit_solr_enabled and is_solrCloud_enabled:
   # Check external solrCloud
   if is_external_solrCloud_enabled and is_external_solrCloud_kerberos:
     ranger_is_solr_kerberised = "true"
+
+hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
+is_hbase_ha_enabled = True if len(hbase_master_hosts) > 1 else False
+is_namenode_ha_enabled = True if len(namenode_hosts) > 1 else False
+ranger_hbase_plugin_enabled = False
+ranger_hdfs_plugin_enabled = False
+
+
+if is_hbase_ha_enabled:
+  if not is_empty(config['configurations']['ranger-hbase-plugin-properties']['ranger-hbase-plugin-enabled']):
+    ranger_hbase_plugin_enabled = config['configurations']['ranger-hbase-plugin-properties']['ranger-hbase-plugin-enabled'].lower() == 'yes'
+if is_namenode_ha_enabled:
+  if not is_empty(config['configurations']['ranger-hdfs-plugin-properties']['ranger-hdfs-plugin-enabled']):
+    ranger_hdfs_plugin_enabled = config['configurations']['ranger-hdfs-plugin-properties']['ranger-hdfs-plugin-enabled'].lower() == 'yes'
\ No newline at end of file
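
A short illustrative sketch of the flag derivation added to params.py above, using a made-up slice of the command configuration. The keys mirror the hunk; the host names and values are examples only, and the namenode_host key is an assumption about where the namenode_hosts list comes from.

config = {
    "clusterHostInfo": {
        "hbase_master_hosts": ["c6401.example.com", "c6402.example.com"],
        "namenode_host": ["c6401.example.com", "c6402.example.com"],
    },
    "configurations": {
        "ranger-hbase-plugin-properties": {"ranger-hbase-plugin-enabled": "Yes"},
        "ranger-hdfs-plugin-properties": {"ranger-hdfs-plugin-enabled": "No"},
    },
}

hbase_master_hosts = config["clusterHostInfo"].get("hbase_master_hosts", [])
namenode_hosts = config["clusterHostInfo"].get("namenode_host", [])

# HA simply means more than one master / namenode host was registered.
is_hbase_ha_enabled = len(hbase_master_hosts) > 1
is_namenode_ha_enabled = len(namenode_hosts) > 1

def plugin_enabled(section, key):
    value = config["configurations"].get(section, {}).get(key, "")
    return value.lower() == "yes"

ranger_hbase_plugin_enabled = is_hbase_ha_enabled and plugin_enabled(
    "ranger-hbase-plugin-properties", "ranger-hbase-plugin-enabled")
ranger_hdfs_plugin_enabled = is_namenode_ha_enabled and plugin_enabled(
    "ranger-hdfs-plugin-properties", "ranger-hdfs-plugin-enabled")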

http://git-wip-us.apache.org/repos/asf/ambari/blob/037ed632/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py
index f1b4797..6386778 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py
@@ -198,6 +198,27 @@ def setup_ranger_admin(upgrade_type=None):
 
   create_core_site_xml(ranger_conf)
 
+  if params.stack_supports_ranger_kerberos and params.security_enabled:
+    if params.is_hbase_ha_enabled and params.ranger_hbase_plugin_enabled:
+      XmlConfig("hbase-site.xml",
+        conf_dir=ranger_conf,
+        configurations=params.config['configurations']['hbase-site'],
+        configuration_attributes=params.config['configuration_attributes']['hbase-site'],
+        owner=params.unix_user,
+        group=params.unix_group,
+        mode=0644
+      )
+
+    if params.is_namenode_ha_enabled and params.ranger_hdfs_plugin_enabled:
+      XmlConfig("hdfs-site.xml",
+        conf_dir=ranger_conf,
+        configurations=params.config['configurations']['hdfs-site'],
+        configuration_attributes=params.config['configuration_attributes']['hdfs-site'],
+        owner=params.unix_user,
+        group=params.unix_group,
+        mode=0644
+      )
+
 def setup_ranger_db(stack_version=None):
   import params
   

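To summarize the conditionals added to setup_ranger_admin() above: hbase-site.xml and hdfs-site.xml are rendered into the Ranger conf directory only when the stack supports Ranger Kerberos, security is enabled, and the matching HA plus plugin flags are set. A hedged sketch of that selection logic follows; write_site_xml() is a hypothetical stand-in for the XmlConfig resource, not the real resource_management API.

def site_files_to_copy(params):
    # Decide which site files Ranger admin needs a local copy of.
    wanted = []
    if params.stack_supports_ranger_kerberos and params.security_enabled:
        if params.is_hbase_ha_enabled and params.ranger_hbase_plugin_enabled:
            wanted.append("hbase-site.xml")
        if params.is_namenode_ha_enabled and params.ranger_hdfs_plugin_enabled:
            wanted.append("hdfs-site.xml")
    return wanted

def write_site_xml(file_name, conf_dir, configurations):
    """Hypothetical stand-in for XmlConfig(...): render file_name into conf_dir."""
    pass

def copy_ha_site_files(params, ranger_conf):
    for file_name in site_files_to_copy(params):
        config_type = file_name[:-len(".xml")]          # "hbase-site" / "hdfs-site"
        write_site_xml(file_name, ranger_conf,
                       params.config["configurations"][config_type])
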
http://git-wip-us.apache.org/repos/asf/ambari/blob/037ed632/ambari-server/src/main/resources/common-services/RANGER/0.6.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/metainfo.xml b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/metainfo.xml
index f330bc9..12fde7e 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/metainfo.xml
@@ -110,6 +110,8 @@
       <configuration-dependencies>
         <config-type>admin-log4j</config-type>
         <config-type>usersync-log4j</config-type>
+        <config-type>ranger-hdfs-plugin-properties</config-type>
+        <config-type>ranger-hbase-plugin-properties</config-type>
       </configuration-dependencies>
 
     </service>