You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by nc...@apache.org on 2016/02/23 15:07:07 UTC

[01/12] ambari git commit: AMBARI-15045. NPE in ambari server log related tez view. (dipayanb)

Repository: ambari
Updated Branches:
  refs/heads/branch-dev-patch-upgrade 55342fc62 -> 50547c5ed


AMBARI-15045. NPE in ambari server log related tez view. (dipayanb)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1dbf7d4e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1dbf7d4e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1dbf7d4e

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 1dbf7d4ef5d41837f36c961d333506057751a855
Parents: 5366af8
Author: Dipayan Bhowmick <di...@gmail.com>
Authored: Mon Feb 22 22:28:05 2016 +0530
Committer: Dipayan Bhowmick <di...@gmail.com>
Committed: Mon Feb 22 22:28:05 2016 +0530

----------------------------------------------------------------------
 .../java/org/apache/ambari/view/tez/utils/ProxyHelper.java    | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1dbf7d4e/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/utils/ProxyHelper.java
----------------------------------------------------------------------
diff --git a/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/utils/ProxyHelper.java b/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/utils/ProxyHelper.java
index a471544..f642193 100644
--- a/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/utils/ProxyHelper.java
+++ b/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/utils/ProxyHelper.java
@@ -54,10 +54,13 @@ public class ProxyHelper {
       URLConnectionProvider provider = viewContext.getURLConnectionProvider();
       HttpURLConnection connection = provider.getConnectionAsCurrent(url, "GET", (String) null, headers);
 
-      if(connection.getResponseCode() != Response.Status.OK.getStatusCode()) {
+      if (!(connection.getResponseCode() >= 200 && connection.getResponseCode() < 300)) {
         LOG.error("Failure in fetching results for the URL: {}. Status: {}", url, connection.getResponseCode());
+        String trace = "";
         inputStream = connection.getErrorStream();
-        String trace = IOUtils.toString(inputStream);
+        if (inputStream != null) {
+          trace = IOUtils.toString(inputStream);
+        }
         throw new ProxyException("Failed to fetch results by the proxy from url: " + url, connection.getResponseCode(), trace);
       }
 


[09/12] ambari git commit: AMBARI-15120 Alerts UI Usability Changes (Joe Wang via rzang)

Posted by nc...@apache.org.
AMBARI-15120 Alerts UI Usability Changes (Joe Wang via rzang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b1be75f9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b1be75f9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b1be75f9

Branch: refs/heads/branch-dev-patch-upgrade
Commit: b1be75f9a718863b942a5f1117804f1636d42768
Parents: 0682fb6
Author: Richard Zang <rz...@apache.org>
Authored: Mon Feb 22 18:34:37 2016 -0800
Committer: Richard Zang <rz...@apache.org>
Committed: Mon Feb 22 18:34:37 2016 -0800

----------------------------------------------------------------------
 ambari-web/app/controllers/main/host.js         |  2 --
 .../controllers/main/service/info/summary.js    |  7 +++---
 ambari-web/app/data/host/categories.js          | 11 ---------
 .../mappers/alert_definition_summary_mapper.js  | 23 +++++++++++++++++++
 ambari-web/app/models/host_component.js         | 12 ++++++++++
 ambari-web/app/styles/alerts.less               | 24 ++++++++++++--------
 ambari-web/app/templates/main/menu_item.hbs     |  5 ----
 .../service/info/summary/master_components.hbs  |  8 +++++++
 ambari-web/app/views/main/menu.js               |  8 +------
 9 files changed, 62 insertions(+), 38 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b1be75f9/ambari-web/app/controllers/main/host.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/host.js b/ambari-web/app/controllers/main/host.js
index d3574b2..91b4df2 100644
--- a/ambari-web/app/controllers/main/host.js
+++ b/ambari-web/app/controllers/main/host.js
@@ -329,8 +329,6 @@ App.MainHostController = Em.ArrayController.extend(App.TableServerMixin, {
       'UNHEALTHY': data.Clusters.health_report['Host/host_status/UNHEALTHY'],
       'ALERT': data.Clusters.health_report['Host/host_status/ALERT'],
       'UNKNOWN': data.Clusters.health_report['Host/host_status/UNKNOWN'],
-      'health-status-WITH-ALERTS': (data.alerts_summary_hosts) ? data.alerts_summary_hosts.CRITICAL + data.alerts_summary_hosts.WARNING : 0,
-      'health-status-CRITICAL': (data.alerts_summary_hosts) ? data.alerts_summary_hosts.CRITICAL : 0,
       'health-status-RESTART': data.Clusters.health_report['Host/stale_config'],
       'health-status-PASSIVE_STATE': data.Clusters.health_report['Host/maintenance_state'],
       'TOTAL': data.Clusters.total_hosts

http://git-wip-us.apache.org/repos/asf/ambari/blob/b1be75f9/ambari-web/app/controllers/main/service/info/summary.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/info/summary.js b/ambari-web/app/controllers/main/service/info/summary.js
index 0e6cc88..f1740a7 100644
--- a/ambari-web/app/controllers/main/service/info/summary.js
+++ b/ambari-web/app/controllers/main/service/info/summary.js
@@ -294,9 +294,9 @@ App.MainServiceInfoSummaryController = Em.Controller.extend(App.WidgetSectionMix
   },
 
   showServiceAlertsPopup: function (event) {
-    var service = event.context;
+    var context = event.context;
     return App.ModalPopup.show({
-      header: Em.I18n.t('services.service.summary.alerts.popup.header').format(service.get('displayName')),
+      header: Em.I18n.t('services.service.summary.alerts.popup.header').format(context.get('displayName')),
       autoHeight: false,
       classNames: ['forty-percent-width-modal'],
       bodyClass: Em.View.extend({
@@ -312,7 +312,8 @@ App.MainServiceInfoSummaryController = Em.Controller.extend(App.WidgetSectionMix
           this.$(".timeago").tooltip('destroy');
         },
         alerts: function () {
-          var serviceDefinitions = this.get('controller.content').filterProperty('service', service);
+          var property = context.get('componentName') ? 'componentName' : 'serviceName';
+          var serviceDefinitions = this.get('controller.content').filterProperty(property, context.get(property));
           // definitions should be sorted in order: critical, warning, ok, unknown, other
           var criticalDefinitions = [], warningDefinitions = [], okDefinitions = [], unknownDefinitions = [];
           serviceDefinitions.forEach(function (definition) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/b1be75f9/ambari-web/app/data/host/categories.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/host/categories.js b/ambari-web/app/data/host/categories.js
index 7e010d5..03e2fee 100644
--- a/ambari-web/app/data/host/categories.js
+++ b/ambari-web/app/data/host/categories.js
@@ -54,17 +54,6 @@ module.exports = [
     healthClass: 'health-status-DEAD-YELLOW'
   },
   {
-    value: Em.I18n.t('hosts.host.alerts.label'),
-    hostProperty: 'criticalWarningAlertsCount',
-    class: 'icon-exclamation-sign',
-    isHealthStatus: false,
-    healthClass: 'health-status-WITH-ALERTS',
-    healthStatus: 'health-status-WITH-ALERTS',
-    column: 7,
-    type: 'custom',
-    filterValue: ['>0', '>0']
-  },
-  {
     value: Em.I18n.t('common.restart'),
     hostProperty: 'componentsWithStaleConfigsCount',
     class: 'icon-refresh',

http://git-wip-us.apache.org/repos/asf/ambari/blob/b1be75f9/ambari-web/app/mappers/alert_definition_summary_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/alert_definition_summary_mapper.js b/ambari-web/app/mappers/alert_definition_summary_mapper.js
index 69f1b16..72eeb01 100644
--- a/ambari-web/app/mappers/alert_definition_summary_mapper.js
+++ b/ambari-web/app/mappers/alert_definition_summary_mapper.js
@@ -67,6 +67,7 @@ App.alertDefinitionSummaryMapper = App.QuickDataMapper.create({
     });
     // set alertsCount and hasCriticalAlerts for each service
     var groupedByServiceName = dataManipulation.groupPropertyValues(alertDefinitions, 'service.serviceName');
+    var groupedByComponentName = dataManipulation.groupPropertyValues(alertDefinitions, 'componentName');
     var services = App.Service.find();
     var servicesMap = services.toArray().toMapByProperty('id');
     Object.keys(groupedByServiceName).forEach(function(serviceName) {
@@ -90,6 +91,28 @@ App.alertDefinitionSummaryMapper = App.QuickDataMapper.create({
           alertsCount: alertsCount,
           hasCriticalAlerts: hasCriticalAlerts
         });
+
+        var masters = service.get('hostComponents').filterProperty('isMaster');
+        masters.forEach(function (master) {
+          var hasCriticalAlerts = false;
+
+          var alertsCount = groupedByComponentName[master.get('componentName')].map(function (alertDefinition) {
+
+            var criticalCount = alertDefinition.getWithDefault('summary.CRITICAL.count', 0);
+            var warningCount = alertDefinition.getWithDefault('summary.WARNING.count', 0);
+
+            if (criticalCount) {
+              hasCriticalAlerts = true;
+            }
+            return criticalCount + warningCount;
+
+          }).reduce(Em.sum, 0);
+
+          master.setProperties({
+            alertsCount: alertsCount,
+            hasCriticalAlerts: hasCriticalAlerts
+          });
+        });
       }
     });
     if (!$.mocho) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/b1be75f9/ambari-web/app/models/host_component.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/host_component.js b/ambari-web/app/models/host_component.js
index 61a51cd..b7a5f5a 100644
--- a/ambari-web/app/models/host_component.js
+++ b/ambari-web/app/models/host_component.js
@@ -128,6 +128,18 @@ App.HostComponent = DS.Model.extend({
     return !App.get('components.nonHDP').contains(this.get('componentName'));
   }.property('componentName', 'App.components.nonHDP'),
 
+  /**
+   * Does component have Critical Alerts
+   * @type {boolean}
+   */
+  hasCriticalAlerts: false,
+
+  /**
+   * Number of the Critical and Warning alerts for current component
+   * @type {number}
+   */
+  alertsCount: 0,
+
   statusClass: function () {
     return this.get('isActive') ? this.get('workStatus') : 'icon-medkit';
   }.property('workStatus', 'isActive'),

http://git-wip-us.apache.org/repos/asf/ambari/blob/b1be75f9/ambari-web/app/styles/alerts.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/alerts.less b/ambari-web/app/styles/alerts.less
index d4e5d27..d5ba99f 100644
--- a/ambari-web/app/styles/alerts.less
+++ b/ambari-web/app/styles/alerts.less
@@ -477,19 +477,23 @@
     padding: 4px 5px;
     margin: 4px;
     font-size: 14px;
-    cursor: pointer;
-  }
-  .alerts-crit-count {
-    background: @health-status-red;
-  }
-  .alerts-warn-count {
-    background: @health-status-orange;
-  }
-  .no-alerts-label {
-    background: @health-status-green;
   }
 }
 
+.alerts-crit-count, .alerts-warn-count, .no-alerts-label {
+  cursor: pointer;
+}
+
+.alerts-crit-count {
+  background: @health-status-red;
+}
+.alerts-warn-count {
+  background: @health-status-orange;
+}
+.no-alerts-label {
+  background: @health-status-green;
+}
+
 #summary-alerts-popup {
   .alert-list-wrap {
     padding: 10px 5px;

http://git-wip-us.apache.org/repos/asf/ambari/blob/b1be75f9/ambari-web/app/templates/main/menu_item.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/menu_item.hbs b/ambari-web/app/templates/main/menu_item.hbs
index bd471b4..4d75298 100644
--- a/ambari-web/app/templates/main/menu_item.hbs
+++ b/ambari-web/app/templates/main/menu_item.hbs
@@ -19,11 +19,6 @@
 
 <a href="#" {{action goToSection view.content.routing target="view"}}>
   {{{unbound view.content.label}}}
-  {{#if view.hasAlertsLabel}}
-    <span {{bindAttr class=":label view.hasCriticalAlerts:alerts-crit-count:alerts-warn-count"}}>
-      {{view.alertsCount}}
-    </span>
-  {{/if}}
 </a>
 
 <!--dropdown menu for the items had dropdowns-->

http://git-wip-us.apache.org/repos/asf/ambari/blob/b1be75f9/ambari-web/app/templates/main/service/info/summary/master_components.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/service/info/summary/master_components.hbs b/ambari-web/app/templates/main/service/info/summary/master_components.hbs
index b4ef0af..460b048 100644
--- a/ambari-web/app/templates/main/service/info/summary/master_components.hbs
+++ b/ambari-web/app/templates/main/service/info/summary/master_components.hbs
@@ -30,6 +30,14 @@
     <td {{bindAttr class=":summary-value comp.summaryValueClassName"}}>
       <span rel='SummaryComponentHealthTooltip' {{bindAttr class="comp.statusClass comp.statusIconClass" data-original-title="comp.passiveTooltip"}}></span>
       {{comp.componentTextStatus}}
+      {{#if comp.alertsCount}}
+        <span {{action "showServiceAlertsPopup" comp target="controller"}}
+          {{bindAttr class=":label comp.hasCriticalAlerts:alerts-crit-count:alerts-warn-count"}}>
+          {{comp.alertsCount}} {{pluralize comp.alertsCount singular="alert" plural="alerts"}}</span>
+      {{else}}
+        <span {{action "showServiceAlertsPopup" comp target="controller"}}
+          class="label no-alerts-label">{{t services.service.summary.alerts.noAlerts}}</span>
+      {{/if}}
     </td>
   </tr>
 {{/each}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b1be75f9/ambari-web/app/views/main/menu.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/menu.js b/ambari-web/app/views/main/menu.js
index 697c85d..b71bbd3 100644
--- a/ambari-web/app/views/main/menu.js
+++ b/ambari-web/app/views/main/menu.js
@@ -39,7 +39,7 @@ App.MainMenuView = Em.CollectionView.extend({
           result.push(
               {label: Em.I18n.t('menu.item.dashboard'), routing: 'dashboard', active: 'active'},
               {label: Em.I18n.t('menu.item.services'), routing: 'services'},
-              {label: Em.I18n.t('menu.item.hosts'), routing: 'hosts', hasAlertsLabel: true},
+              {label: Em.I18n.t('menu.item.hosts'), routing: 'hosts'},
               {label: Em.I18n.t('menu.item.alerts'), routing: 'alerts'}
           );
         }
@@ -76,12 +76,6 @@ App.MainMenuView = Em.CollectionView.extend({
       return "";
     }.property('App.router.location.lastSetURL', 'App.router.clusterController.isLoaded'),
 
-    alertsCount: Em.computed.alias('App.router.mainHostController.hostsCountMap.health-status-WITH-ALERTS'),
-
-    hasCriticalAlerts: Em.computed.gt('App.router.mainHostController.hostsCountMap.health-status-CRITICAL', 0),
-
-    hasAlertsLabel: Em.computed.and('content.hasAlertsLabel', 'alertsCount'),
-
     templateName: require('templates/main/menu_item'),
 
     dropdownMenu: Em.computed.existsIn('content.routing', ['services', 'admin', 'views']),


[03/12] ambari git commit: AMBARI-14827. HiveViews: Value in cell coming up as null for select query. (Gaurav Nagar via dipayanb)

Posted by nc...@apache.org.
AMBARI-14827. HiveViews: Value in cell coming up as null for select query. (Gaurav Nagar via dipayanb)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9dfe929e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9dfe929e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9dfe929e

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 9dfe929ee399953177611fe1f26abe98f5b9a265
Parents: b0ab893
Author: Dipayan Bhowmick <di...@gmail.com>
Authored: Mon Feb 22 22:50:52 2016 +0530
Committer: Dipayan Bhowmick <di...@gmail.com>
Committed: Mon Feb 22 22:50:52 2016 +0530

----------------------------------------------------------------------
 .../view/hive/resources/jobs/ResultsPaginationController.java | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9dfe929e/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ResultsPaginationController.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ResultsPaginationController.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ResultsPaginationController.java
index 84dec4d..cc2ff42 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ResultsPaginationController.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ResultsPaginationController.java
@@ -190,7 +190,12 @@ public class ResultsPaginationController {
       for(Object[] row : rows ){
         String[] strs = new String[row.length];
         for( int colNum = 0 ; colNum < row.length ; colNum++ ){
-          strs[colNum] = String.valueOf(row[colNum]);
+          String value = String.valueOf(row[colNum]);
+          if(row[colNum] != null && (value.isEmpty() || value.equalsIgnoreCase("null"))){
+            strs[colNum] = String.format("\"%s\"",value);
+          }else{
+            strs[colNum] = value;
+          }
         }
         this.rows.add(strs);
       }


[06/12] ambari git commit: AMBARI-15103: Move HAWQ system password to General Accordion tab (mithmatt via jaoki)

Posted by nc...@apache.org.
AMBARI-15103: Move HAWQ system password to General Accordion tab (mithmatt via jaoki)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d741e438
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d741e438
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d741e438

Branch: refs/heads/branch-dev-patch-upgrade
Commit: d741e438db58bcea2d59f902ccf9d7ad40097b7f
Parents: 49813c9
Author: Jun Aoki <ja...@apache.org>
Authored: Mon Feb 22 14:51:07 2016 -0800
Committer: Jun Aoki <ja...@apache.org>
Committed: Mon Feb 22 14:51:07 2016 -0800

----------------------------------------------------------------------
 .../common-services/HAWQ/2.0.0/configuration/hawq-env.xml     | 2 +-
 ambari-web/app/data/HDP2.3/site_properties.js                 | 7 +++++++
 2 files changed, 8 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d741e438/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-env.xml b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-env.xml
index 22777f1..a948bae 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-env.xml
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-env.xml
@@ -19,7 +19,7 @@
 
   <property>
     <name>hawq_password</name>
-    <display-name>HAWQ system user password</display-name>
+    <display-name>HAWQ System User Password</display-name>
     <value></value>
     <description>The password of HAWQ system user.
       During cluster initialization and when provisioning new hosts, ssh keys are exchanged using this password.

http://git-wip-us.apache.org/repos/asf/ambari/blob/d741e438/ambari-web/app/data/HDP2.3/site_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2.3/site_properties.js b/ambari-web/app/data/HDP2.3/site_properties.js
index c5fb525..e706d40 100644
--- a/ambari-web/app/data/HDP2.3/site_properties.js
+++ b/ambari-web/app/data/HDP2.3/site_properties.js
@@ -340,6 +340,13 @@ hdp23properties.push({
     "index": 9
   },
   {
+    "name": "hawq_password",
+    "filename": "hawq-env.xml",
+    "category": "General",
+    "serviceName": "HAWQ",
+    "index": 10
+  },
+  {
     "name": "content",
     "serviceName": "HAWQ",
     "filename": "hawq-check-env.xml",


[02/12] ambari git commit: AMBARI-15044. HDFS API ignores fs.permissions.umask-mode property. (Gaurav Nagar via dipayanb)

Posted by nc...@apache.org.
AMBARI-15044. HDFS API ignores fs.permissions.umask-mode property. (Gaurav Nagar via dipayanb)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b0ab893f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b0ab893f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b0ab893f

Branch: refs/heads/branch-dev-patch-upgrade
Commit: b0ab893ff09a014b704c84615c852fdb6b1d84e7
Parents: 1dbf7d4
Author: Dipayan Bhowmick <di...@gmail.com>
Authored: Mon Feb 22 22:43:50 2016 +0530
Committer: Dipayan Bhowmick <di...@gmail.com>
Committed: Mon Feb 22 22:44:52 2016 +0530

----------------------------------------------------------------------
 contrib/views/files/src/main/resources/view.xml             | 8 ++++++++
 contrib/views/hive/src/main/resources/view.xml              | 9 +++++++++
 contrib/views/pig/src/main/resources/view.xml               | 9 +++++++++
 .../apache/ambari/view/utils/hdfs/ConfigurationBuilder.java | 6 ++++++
 4 files changed, 32 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b0ab893f/contrib/views/files/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/view.xml b/contrib/views/files/src/main/resources/view.xml
index f594fbe..adaec10 100644
--- a/contrib/views/files/src/main/resources/view.xml
+++ b/contrib/views/files/src/main/resources/view.xml
@@ -88,6 +88,14 @@
         <cluster-config>fake</cluster-config>
     </parameter>
     <parameter>
+        <name>hdfs.umask-mode</name>
+        <description>The umask used when creating files and directories. Defaults to 022</description>
+        <label>Umask</label>
+        <default-value>022</default-value>
+        <required>false</required>
+        <cluster-config>hdfs-site/fs.permissions.umask-mode</cluster-config>
+    </parameter>
+    <parameter>
         <name>hdfs.auth_to_local</name>
         <description>Auth to Local Configuration</description>
         <label>Auth To Local</label>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0ab893f/contrib/views/hive/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/view.xml b/contrib/views/hive/src/main/resources/view.xml
index e3aea70..892f044 100644
--- a/contrib/views/hive/src/main/resources/view.xml
+++ b/contrib/views/hive/src/main/resources/view.xml
@@ -167,6 +167,15 @@
     </parameter>
 
     <parameter>
+        <name>hdfs.umask-mode</name>
+        <description>The umask used when creating files and directories. Defaults to 022</description>
+        <label>Umask</label>
+        <default-value>022</default-value>
+        <required>false</required>
+        <cluster-config>hdfs-site/fs.permissions.umask-mode</cluster-config>
+    </parameter>
+
+    <parameter>
         <name>hdfs.auth_to_local</name>
         <description>Auth to Local Configuration</description>
         <label>Auth To Local</label>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0ab893f/contrib/views/pig/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/pig/src/main/resources/view.xml b/contrib/views/pig/src/main/resources/view.xml
index 8dd4a4f..9df91f8 100644
--- a/contrib/views/pig/src/main/resources/view.xml
+++ b/contrib/views/pig/src/main/resources/view.xml
@@ -92,6 +92,15 @@
     </parameter>
 
     <parameter>
+        <name>hdfs.umask-mode</name>
+        <description>The umask used when creating files and directories. Defaults to 022</description>
+        <label>Umask</label>
+        <default-value>022</default-value>
+        <required>false</required>
+        <cluster-config>hdfs-site/fs.permissions.umask-mode</cluster-config>
+    </parameter>
+
+    <parameter>
         <name>webhdfs.username</name>
         <description>User and doAs for proxy user for HDFS. By default, uses the currently logged-in Ambari user.</description>
         <label>WebHDFS Username</label>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0ab893f/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/ConfigurationBuilder.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/ConfigurationBuilder.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/ConfigurationBuilder.java
index a739c84..121fe84 100644
--- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/ConfigurationBuilder.java
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/ConfigurationBuilder.java
@@ -63,6 +63,9 @@ public class ConfigurationBuilder {
   public static final String FAILOVER_PROXY_PROVIDER_INSTANCE_PROPERTY = "webhdfs.client.failover.proxy.provider";
   public static final String FAILOVER_PROXY_PROVIDER_CLUSTER_PROPERTY  = "dfs.client.failover.proxy.provider.%s";
 
+  public static final String UMASK_CLUSTER_PROPERTY = "fs.permissions.umask-mode";
+  public static final String UMASK_INSTANCE_PROPERTY = "hdfs.umask-mode";
+
   private Configuration conf = new Configuration();
   private ViewContext context;
   private AmbariApi ambariApi = null;
@@ -224,6 +227,9 @@ public class ConfigurationBuilder {
     parseProperties();
     setAuthParams(buildAuthenticationConfig());
 
+    String umask = context.getProperties().get(UMASK_INSTANCE_PROPERTY);
+    if(umask != null && !umask.isEmpty()) conf.set(UMASK_CLUSTER_PROPERTY,umask);
+
     conf.set("fs.hdfs.impl", DistributedFileSystem.class.getName());
     conf.set("fs.webhdfs.impl", WebHdfsFileSystem.class.getName());
     conf.set("fs.file.impl", LocalFileSystem.class.getName());


[05/12] ambari git commit: AMBARI-15061: PXF Service checks fails with timeout (bhuvnesh2703 via jaoki)

Posted by nc...@apache.org.
AMBARI-15061: PXF Service checks fails with timeout (bhuvnesh2703 via jaoki)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/49813c98
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/49813c98
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/49813c98

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 49813c987a491d1527f6499927d22746a11f3631
Parents: ecd6a30
Author: Jun Aoki <ja...@apache.org>
Authored: Mon Feb 22 14:44:07 2016 -0800
Committer: Jun Aoki <ja...@apache.org>
Committed: Mon Feb 22 14:44:07 2016 -0800

----------------------------------------------------------------------
 .../PXF/3.0.0/package/scripts/params.py         |   1 +
 .../PXF/3.0.0/package/scripts/pxf_constants.py  |   3 +
 .../PXF/3.0.0/package/scripts/service_check.py  | 167 ++++++++++---------
 3 files changed, 96 insertions(+), 75 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/49813c98/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
index b3e85e4..1dbed45 100644
--- a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
@@ -42,6 +42,7 @@ tomcat_group = "tomcat"
 # Directories
 pxf_conf_dir = "/etc/pxf/conf"
 pxf_instance_dir = "/var/pxf"
+exec_tmp_dir = Script.get_tmp_dir()
 
 # Java home path
 java_home = config["hostLevelParams"]["java_home"] if "java_home" in config["hostLevelParams"] else None

http://git-wip-us.apache.org/repos/asf/ambari/blob/49813c98/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf_constants.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf_constants.py b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf_constants.py
index 9d93a38..1d88893 100644
--- a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf_constants.py
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf_constants.py
@@ -24,4 +24,7 @@ pxf_hdfs_test_dir = "/pxf_hdfs_smoke_test"
 pxf_hdfs_read_test_file = pxf_hdfs_test_dir + "/pxf_smoke_test_read_data"
 pxf_hdfs_write_test_file = pxf_hdfs_test_dir + "/pxf_smoke_test_write_data"
 pxf_hbase_test_table = "pxf_hbase_smoke_test_table"
+hbase_populate_data_script = "hbase-populate-data.sh"
+hbase_cleanup_data_script = "hbase-cleanup-data.sh"
 pxf_hive_test_table = "pxf_hive_smoke_test_table"
+hive_populate_data_script = "hive-populate-data.hql"

http://git-wip-us.apache.org/repos/asf/ambari/blob/49813c98/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/service_check.py
index 21b7c5d..6f60661 100644
--- a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/service_check.py
@@ -16,18 +16,20 @@ See the License for the specific language governing permissions and
 limitations under the License.
 """
 import json
+import os
 
 from resource_management.libraries.script import Script
 from resource_management.core.exceptions import Fail
 from resource_management.core.logger import Logger
 from resource_management.core.system import System
-from resource_management.core.resources.system import Execute
+from resource_management.core.resources.system import Execute, File
 from resource_management.core.environment import Environment
 from resource_management.libraries.functions.curl_krb_request import curl_krb_request
+from resource_management.core.source import InlineTemplate
+from resource_management.libraries.functions.default import default
 from pxf_utils import makeHTTPCall, runLocalCmd
 import pxf_constants
 
-
 class PXFServiceCheck(Script):
   """
   Runs a set of simple PXF tests to verify if the service has been setup correctly
@@ -46,13 +48,12 @@ class PXFServiceCheck(Script):
     "X-GP-URL-HOST": pxf_constants.service_check_hostname
   }
 
-
   def service_check(self, env):
     """
     Runs the service check for PXF
     """
     import params
-    Logger.info("Starting PXF service checks..")
+    Logger.info("Starting PXF service checks")
     try:
       # Get delegation token if security is enabled
       if params.security_enabled:
@@ -66,33 +67,13 @@ class PXFServiceCheck(Script):
         self.run_hbase_tests()
       if params.is_hive_installed:
         self.run_hive_tests()
-    except:
-      msg = "PXF service check failed"
-      Logger.error(msg)
-      raise Fail(msg)
-    finally:
-      try:
-        self.cleanup_test_data()
-      except Exception as e:
-        Logger.error(e)
+    except Exception, ex:
+      Logger.error("Exception received during service check execution:\n{0}".format(ex))
+      Logger.error("PXF service check failed.")
+      raise
 
     Logger.info("Service check completed successfully")
 
-
-  def cleanup_test_data(self):
-    """
-    Cleans up the temporary test data generated for service check
-    """
-    Logger.info("Cleaning up PXF smoke check temporary data")
-
-    import params
-    self.__cleanup_hdfs_data()
-    if params.is_hbase_installed:
-      self.__cleanup_hbase_data()
-    if params.is_hive_installed:
-      self.__cleanup_hive_data()
-
-
   def __get_pxf_protocol_version(self):
     """
     Gets the pxf protocol version number
@@ -113,7 +94,6 @@ class PXFServiceCheck(Script):
     Logger.error(msg)
     raise Fail(msg)
 
-
   def __check_pxf_read(self, headers):
     """
     Performs a generic PXF read
@@ -122,12 +102,13 @@ class PXFServiceCheck(Script):
     try:
       response = makeHTTPCall(url, headers)
       if not "PXFFragments" in response:
-        Logger.error("Unable to find PXFFragments in the response")
+        Logger.error("Unable to find PXFFragments in the response. Response received from the server:\n{0}".format(response))
         raise
     except:
       msg = "PXF data read failed"
       Logger.error(msg)
       raise Fail(msg)
+    Logger.info("PXF data read successful")
 
 
   def __get_delegation_token(self, user, keytab, principal, kinit_path):
@@ -153,12 +134,15 @@ class PXFServiceCheck(Script):
     """
     Runs a set of PXF HDFS checks
     """
-    Logger.info("Running PXF HDFS checks")
+    Logger.info("Running PXF HDFS service checks")
     self.__check_if_client_exists("Hadoop-HDFS")
     self.__cleanup_hdfs_data()
-    self.__write_hdfs_data()
-    self.__check_pxf_hdfs_read()
-    self.__check_pxf_hdfs_write()
+    try:
+      self.__write_hdfs_data()
+      self.__check_pxf_hdfs_read()
+      self.__check_pxf_hdfs_write()
+    finally:
+      self.__cleanup_hdfs_data()
 
   def __write_hdfs_data(self):
     """
@@ -235,32 +219,52 @@ class PXFServiceCheck(Script):
     )
     params.HdfsResource(None, action="execute")
 
-
   # HBase Routines
   def run_hbase_tests(self):
     """
     Runs a set of PXF HBase checks
     """
     import params
-    Logger.info("Running PXF HBase checks")
-    if params.security_enabled:
-      Execute("{0} -kt {1} {2}".format(params.kinit_path_local, params.hbase_user_keytab, params.hbase_principal_name),
-              user = params.hbase_user)
-    self.__cleanup_hbase_data()
+    Logger.info("Running PXF HBase service checks")
     self.__check_if_client_exists("HBase")
-    self.__write_hbase_data()
-    self.__check_pxf_hbase_read()
+    self.__create_hbase_scripts()
+    kinit_cmd = "{0} -kt {1} {2};".format(params.kinit_path_local, params.hbase_user_keytab, params.hbase_principal_name) if params.security_enabled else ""
+    try:
+      message = "Creating temporary HBase smoke test table with data"
+      self.__run_hbase_script(pxf_constants.hbase_populate_data_script, kinit_cmd, message)
+      self.__check_pxf_hbase_read()
+    finally:
+      message = "Cleaning up HBase smoke test table"
+      self.__run_hbase_script(pxf_constants.hbase_cleanup_data_script, kinit_cmd, message)
 
-  def __write_hbase_data(self):
+  def __create_hbase_scripts(self):
     """
-    Creates a temporary HBase table for the service checks
+    Create file holding hbase commands
     """
     import params
-    Logger.info("Creating temporary HBase test data")
-    cmd = "echo \"create '{0}', 'cf'\" | hbase shell".format(pxf_constants.pxf_hbase_test_table)
-    Execute(cmd, logoutput = True, user = params.hbase_user)
-    cmd = "echo \"put '{0}', 'row1', 'cf:a', 'value1'; put '{0}', 'row1', 'cf:b', 'value2'\" | hbase shell".format(pxf_constants.pxf_hbase_test_table)
-    Execute(cmd, logoutput = True, user = params.hbase_user)
+    hbase_populate_data_cmds = "disable '{0}'\n" \
+                               "drop '{0}'\n" \
+                               "create '{0}', 'cf'\n" \
+                               "put '{0}', 'row1', 'cf:a', 'value1'\n" \
+                               "put '{0}', 'row1', 'cf:b', 'value2'".format(pxf_constants.pxf_hbase_test_table)
+
+    File("{0}".format(os.path.join(params.exec_tmp_dir, pxf_constants.hbase_populate_data_script)),
+         content=InlineTemplate("{0}".format(hbase_populate_data_cmds)))
+
+    hbase_cleanup_data_cmds = "disable '{0}'\n" \
+                              "drop '{0}'".format(pxf_constants.pxf_hbase_test_table)
+
+    File("{0}".format(os.path.join(params.exec_tmp_dir, pxf_constants.hbase_cleanup_data_script)),
+         content=InlineTemplate("{0}".format(hbase_cleanup_data_cmds)))
+
+  def __run_hbase_script(self, script, kinit_cmd, message):
+    """
+    Executes hbase shell command
+    """
+    import params
+    Logger.info(message)
+    hbase_shell_cmd = "{0} hbase shell {1}".format(kinit_cmd, os.path.join(params.exec_tmp_dir, script))
+    Execute(hbase_shell_cmd, user=params.hbase_user, logoutput=True)
 
   def __check_pxf_hbase_read(self):
     """
@@ -274,37 +278,53 @@ class PXFServiceCheck(Script):
     headers.update(self.commonPXFHeaders)
     self.__check_pxf_read(headers)
 
-  def __cleanup_hbase_data(self):
-    """
-    Cleans up the test HBase data
-    """
-    import params
-    Logger.info("Cleaning up HBase test data")
-    cmd = "echo \"disable '{0}'\" | hbase shell > /dev/null 2>&1".format(pxf_constants.pxf_hbase_test_table)
-    Execute(cmd, logoutput = True, user = params.hbase_user)
-    cmd = "echo \"drop '{0}'\" | hbase shell > /dev/null 2>&1".format(pxf_constants.pxf_hbase_test_table)
-    Execute(cmd, logoutput = True, user = params.hbase_user)
-
-
   # Hive Routines
   def run_hive_tests(self):
     """
     Runs a set of PXF Hive checks
     """
-    Logger.info("Running PXF Hive checks")
+    import params
+    Logger.info("Running PXF Hive service checks")
     self.__check_if_client_exists("Hive")
-    self.__cleanup_hive_data()
-    self.__write_hive_data()
-    self.__check_pxf_hive_read()
 
-  def __write_hive_data(self):
+    # Create file holding hive query commands
+    hive_populate_data_cmds = "DROP TABLE IF EXISTS {0};\n" \
+                         "CREATE TABLE {0} (id INT);\n" \
+                         "INSERT INTO {0} VALUES (1);".format(pxf_constants.pxf_hive_test_table)
+    File("{0}/{1}".format(params.exec_tmp_dir, pxf_constants.hive_populate_data_script),
+         content=InlineTemplate("{0}".format(hive_populate_data_cmds)))
+
+    # Get the parameters required to create jdbc url for beeline
+    hive_server_port = default("/configurations/hive-site/hive.server2.thrift.port", None)
+    hive_server_host = default("/clusterHostInfo/hive_server_host", None)
+    if hive_server_host is None or hive_server_port is None:
+      raise Fail("Input parameters are invalid for beeline connection string, both hive_server_host and " \
+            "hive.server2.thrift.port should be not None. Current values are:\nhive_server_host={0}\n" \
+            "hive.server2.thrift.port={1}".format(hive_server_host, hive_server_port))
+    jdbc_url = "jdbc:hive2://{0}:{1}/default".format(hive_server_host[0], hive_server_port)
+    beeline_conn_cmd = "beeline -u '{0}'".format(jdbc_url)
+
+    if params.security_enabled:
+      hive_server_principal = default('/configurations/hive-site/hive.server2.authentication.kerberos.principal', None)
+      if hive_server_principal is None:
+        raise Fail("Input parameter invalid for beeline connection string, hive.server2.authentication.kerberos.principal " \
+              "should be not None")
+      beeline_conn_cmd = "beeline -u '{0};principal={1}'".format(jdbc_url, hive_server_principal)
+
+    try:
+      self.__write_hive_data(beeline_conn_cmd)
+      self.__check_pxf_hive_read()
+    finally:
+      self.__cleanup_hive_data(beeline_conn_cmd)
+
+  def __write_hive_data(self, beeline_conn_cmd):
     """
     Creates a temporary Hive table for the service checks
     """
     import params
-    Logger.info("Creating temporary Hive test data")
-    cmd = "hive -e 'CREATE TABLE IF NOT EXISTS {0} (id INT); INSERT INTO {0} VALUES (1);'".format(pxf_constants.pxf_hive_test_table)
-    Execute(cmd, logoutput = True, user = params.hdfs_user)
+    Logger.info("Creating temporary Hive smoke test table with data")
+    cmd = "{0} -f {1}".format(beeline_conn_cmd, os.path.join(params.exec_tmp_dir, pxf_constants.hive_populate_data_script))
+    Execute(cmd, logoutput=True, user=params.hdfs_user)
 
   def __check_pxf_hive_read(self):
     """
@@ -318,15 +338,14 @@ class PXFServiceCheck(Script):
     headers.update(self.commonPXFHeaders)
     self.__check_pxf_read(headers)
 
-  def __cleanup_hive_data(self):
+  def __cleanup_hive_data(self, beeline_conn_cmd):
     """
     Cleans up the test Hive data
     """
     import params
     Logger.info("Cleaning up Hive test data")
-    cmd = "hive -e 'DROP TABLE IF EXISTS {0};'".format(pxf_constants.pxf_hive_test_table)
-    Execute(cmd, logoutput = True, user = params.hdfs_user)
-
+    cmd = "{0} -e 'DROP TABLE IF EXISTS {1};'".format(beeline_conn_cmd, pxf_constants.pxf_hive_test_table)
+    Execute(cmd, logoutput=True, user=params.hdfs_user)
 
   # Package Routines
   def __package_exists(self, pkg):
@@ -338,7 +357,6 @@ class PXFServiceCheck(Script):
     else:
       return not runLocalCmd("yum list installed | egrep -i ^" + pkg)
 
-
   def __check_if_client_exists(self, serviceName):
     Logger.info("Checking if " + serviceName + " client libraries exist")
     if not self.__package_exists(serviceName):
@@ -349,4 +367,3 @@ class PXFServiceCheck(Script):
 
 if __name__ == "__main__":
   PXFServiceCheck().execute()
-


[07/12] ambari git commit: AMBARI-15096 Edit WEB Alert should show Critical for Connection Timeout (Joe Wang via rzang)

Posted by nc...@apache.org.
AMBARI-15096 Edit WEB Alert should show Critical for Connection Timeout (Joe Wang via rzang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7091250a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7091250a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7091250a

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 7091250a932bbb5cd4edc2d4c421f64abc710a13
Parents: d741e43
Author: Richard Zang <rz...@apache.org>
Authored: Mon Feb 22 16:00:54 2016 -0800
Committer: Richard Zang <rz...@apache.org>
Committed: Mon Feb 22 16:00:54 2016 -0800

----------------------------------------------------------------------
 .../main/alerts/definition_configs_controller.js    | 16 ++++++++++++++--
 ambari-web/app/models/alerts/alert_config.js        | 13 -------------
 ambari-web/app/styles/alerts.less                   |  5 +++--
 .../main/alerts/configs/alert_config_parameter.hbs  | 10 +++++-----
 4 files changed, 22 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7091250a/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/alerts/definition_configs_controller.js b/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
index 1b7fecf..e9f6726 100644
--- a/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
+++ b/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
@@ -16,6 +16,8 @@
  * limitations under the License.
  */
 
+var numericUtils = require('utils/number_utils');
+
 App.MainAlertDefinitionConfigsController = Em.Controller.extend({
 
   name: 'mainAlertDefinitionConfigsController',
@@ -289,8 +291,18 @@ App.MainAlertDefinitionConfigsController = Em.Controller.extend({
         text: isWizard ? '' : this.getThresholdsProperty('critical', 'text'),
         value: isWizard ? '' : this.getThresholdsProperty('critical', 'value')
       }),
-      App.AlertConfigProperties.ConnectionTimeout.create({
-        value: alertDefinition.get('uri.connectionTimeout')
+      App.AlertConfigProperties.Parameter.create({
+        value: alertDefinition.get('uri.connectionTimeout'),
+        threshold: "CRITICAL",
+        name: 'connection_timeout',
+        label: 'Connection Timeout',
+        displayType: 'parameter',
+        apiProperty: 'source.uri.connection_timeout',
+        units: 'Seconds',
+        isValid: function () {
+          var value = this.get('value');
+          return numericUtils.isPositiveNumber(value);
+        }.property('value')
       })
     ]);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/7091250a/ambari-web/app/models/alerts/alert_config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/alerts/alert_config.js b/ambari-web/app/models/alerts/alert_config.js
index 867b45a..2ef21a6 100644
--- a/ambari-web/app/models/alerts/alert_config.js
+++ b/ambari-web/app/models/alerts/alert_config.js
@@ -431,19 +431,6 @@ App.AlertConfigProperties = {
     }.property('value')
   }),
 
-  ConnectionTimeout: App.AlertConfigProperty.extend({
-    name: 'connection_timeout',
-    label: 'Connection Timeout',
-    displayType: 'textField',
-    classNames: 'alert-connection-timeout',
-    apiProperty: 'source.uri.connection_timeout',
-    unit: 'Seconds',
-    isValid: function () {
-      var value = this.get('value');
-      return numericUtils.isPositiveNumber(value);
-    }.property('value')
-  }),
-
   DefaultPort: App.AlertConfigProperty.extend({
     name: 'default_port',
     label: 'Default Port',

http://git-wip-us.apache.org/repos/asf/ambari/blob/7091250a/ambari-web/app/styles/alerts.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/alerts.less b/ambari-web/app/styles/alerts.less
index 1b8d5f4..d4e5d27 100644
--- a/ambari-web/app/styles/alerts.less
+++ b/ambari-web/app/styles/alerts.less
@@ -324,7 +324,7 @@
     padding-right: 13px;
   }
 
-  .alert-interval-input, .alert-connection-timeout {
+  .alert-interval-input {
     input {
       width: 20%;
     }
@@ -338,7 +338,8 @@
 
   .badge-container {
     height: 26px;
-    .alert-threshold-badge {
+    .alert-threshold-badge,
+    .alert-parameter-badge {
       line-height: 26px;
       height: 26px;
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7091250a/ambari-web/app/templates/main/alerts/configs/alert_config_parameter.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/alerts/configs/alert_config_parameter.hbs b/ambari-web/app/templates/main/alerts/configs/alert_config_parameter.hbs
index 608dc59..c7364e9 100644
--- a/ambari-web/app/templates/main/alerts/configs/alert_config_parameter.hbs
+++ b/ambari-web/app/templates/main/alerts/configs/alert_config_parameter.hbs
@@ -17,13 +17,13 @@
 }}
 
 <div>
-  {{#if view.property.threshold}}
-    <div class="span2 badge-container">
+  <div class="span2 badge-container">
+    {{#if view.property.threshold}}
       <span {{bindAttr class="view.property.badgeCssClass :alert-parameter-badge :alert-state-single-host view.property.threshold:label"}}>
         {{view.property.badge}}
-        </span>&nbsp;
-    </div>
-  {{/if}}
+      </span>&nbsp;
+    {{/if}}
+  </div>
   <div rel="parameter-tooltip" {{bindAttr data-original-title="view.property.description" class="view.bigInput:span12:span3 view.property.units:input-append view.property.thresholdNotExists:stuck-left"}}>
     {{view Em.TextField valueBinding="view.property.value" disabledBinding="view.property.isDisabled" class ="view.bigInput:span12:span7"}}
     {{#if view.property.units}}


[04/12] ambari git commit: AMBARI-15116. HAWQSEGMENT should be unchecked on host which does not have DataNode (bhuvnesh chaudhary via odiachenko).

Posted by nc...@apache.org.
AMBARI-15116. HAWQSEGMENT should be unchecked on host which does not have DataNode (bhuvnesh chaudhary via odiachenko).


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ecd6a305
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ecd6a305
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ecd6a305

Branch: refs/heads/branch-dev-patch-upgrade
Commit: ecd6a3057dafe7adb12e46e7348fea5d912635a6
Parents: 9dfe929
Author: Oleksandr Diachenko <od...@pivotal.io>
Authored: Mon Feb 22 13:25:41 2016 -0800
Committer: Oleksandr Diachenko <od...@pivotal.io>
Committed: Mon Feb 22 13:25:41 2016 -0800

----------------------------------------------------------------------
 .../stacks/HDP/2.3/services/stack_advisor.py    |   2 +
 .../stacks/2.3/common/test_stack_advisor.py     | 103 ++++++++++++++++++-
 2 files changed, 102 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ecd6a305/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
index 34e4cfa..11adca5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
@@ -40,6 +40,8 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
       for host_group in hostGroups:
         if {"name": "DATANODE"} in host_group["components"] and {"name": "HAWQSEGMENT"} not in host_group["components"]:
           host_group["components"].append({"name": "HAWQSEGMENT"})
+        if {"name": "DATANODE"} not in host_group["components"] and {"name": "HAWQSEGMENT"} in host_group["components"]:
+          host_group["components"].remove({"name": "HAWQSEGMENT"})
 
     # co-locate PXF with NAMENODE and DATANODE
     if "PXF" in servicesList:

http://git-wip-us.apache.org/repos/asf/ambari/blob/ecd6a305/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
index 03ae6cc..690c053 100644
--- a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
@@ -58,6 +58,13 @@ class TestHDP23StackAdvisor(TestCase):
       data = json.load(f)
     return data
 
+  def prepareHosts(self, hostsNames):
+    hosts = { "items": [] }
+    for hostName in hostsNames:
+      nextHost = {"Hosts":{"host_name" : hostName}}
+      hosts["items"].append(nextHost)
+    return hosts
+
   @patch('__builtin__.open')
   @patch('os.path.exists')
   def get_system_min_uid_magic(self, exists_mock, open_mock):
@@ -168,15 +175,105 @@ class TestHDP23StackAdvisor(TestCase):
   def test_hawqsegmentDatanode(self):
     """ Test that HAWQSegment gets recommended on same host group which has DATANODE"""
 
-    services = self.load_json("services-hawq-pxf-hdfs.json")
-    hosts = self.load_json("hosts-3-hosts.json")
-    recommendations = self.stackAdvisor.createComponentLayoutRecommendations(services, hosts)
+    # Case 1: HDFS is already installed, HAWQ is being added during Add Service Wizard
+    services =  {
+                  "services" : [
+                    {
+                      "StackServices" : {
+                        "service_name" : "HDFS"
+                      },
+                      "components" : [
+                        {
+                          "StackServiceComponents" : {
+                            "cardinality" : "1+",
+                            "component_category" : "SLAVE",
+                            "component_name" : "DATANODE",
+                            "hostnames" : [ "c6401.ambari.apache.org" ]
+                          }
+                        }
+                      ]
+                    },
+                    {
+                      "StackServices" : {
+                        "service_name" : "HAWQ"
+                      },
+                      "components" : [
+                        {
+                          "StackServiceComponents" : {
+                            "cardinality" : "1+",
+                            "component_category" : "SLAVE",
+                            "component_name" : "HAWQSEGMENT",
+                            "hostnames" : [ ]
+                          }
+                        }
+                      ]
+                    }
+                  ]
+                }
+
+    # Cluster has 2 hosts
+    hosts = self.prepareHosts(["c6401.ambari.apache.org", "c6402.ambari.apache.org"])
 
+    recommendations = self.stackAdvisor.createComponentLayoutRecommendations(services, hosts)
+    """
+    Recommendations is as below:
+                                  {
+                                    'blueprint':{
+                                      'host_groups':[
+                                        {
+                                          'name':'host-group-1',
+                                          'components':[ ]
+                                        },
+                                        {
+                                          'name':'host-group-2',
+                                          'components':[
+                                            {
+                                              'name':'DATANODE'
+                                            },
+                                            {
+                                              'name':'HAWQSEGMENT'
+                                            }
+                                          ]
+                                        }
+                                      ]
+                                    },
+                                      'blueprint_cluster_binding':{
+                                        'host_groups':[
+                                          {
+                                            'hosts':[
+                                              {
+                                                'fqdn':'c6402.ambari.apache.org'
+                                              }
+                                            ],
+                                            'name':'host-group-1'
+                                          },
+                                          {
+                                            'hosts':[
+                                              {
+                                                'fqdn':'c6401.ambari.apache.org'
+                                              }
+                                            ],
+                                            'name':'host-group-2'
+                                          }
+                                        ]
+                                      }
+                                    }
+    """
     for hostgroup in recommendations["blueprint"]["host_groups"]:
       component_names = [component["name"] for component in hostgroup["components"]]
       if 'DATANODE' in component_names:
         self.assertTrue('HAWQSEGMENT' in component_names)
+      if 'DATANODE' not in component_names:
+        self.assertTrue('HAWQSEGMENT' not in component_names)
 
+    # Case 2: HDFS and HAWQ are being installed on a fresh cluster, HAWQSEGMENT and DATANODE must be recommended on all the host groups
+    # Update HDFS hostnames to empty list
+    services["services"][0]["components"][0]["StackServiceComponents"]["hostnames"] = []
+    recommendations = self.stackAdvisor.createComponentLayoutRecommendations(services, hosts)
+    for hostgroup in recommendations["blueprint"]["host_groups"]:
+      component_names = [component["name"] for component in hostgroup["components"]]
+      self.assertTrue('HAWQSEGMENT' in component_names)
+      self.assertTrue('DATANODE' in component_names)
 
   def fqdn_mock_result(value=None):
       return 'c6401.ambari.apache.org' if value is None else value


[08/12] ambari git commit: AMBARI-15124: Can't install ambari-server in Docker environment (masatana via jaoki)

Posted by nc...@apache.org.
AMBARI-15124: Can't install ambari-server in Docker environment (masatana via jaoki)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0682fb60
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0682fb60
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0682fb60

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 0682fb603f96bf9ded5333b19751011399c33099
Parents: 7091250
Author: Jun Aoki <ja...@apache.org>
Authored: Mon Feb 22 16:06:24 2016 -0800
Committer: Jun Aoki <ja...@apache.org>
Committed: Mon Feb 22 16:06:24 2016 -0800

----------------------------------------------------------------------
 dev-support/docker/docker/bin/ambaribuild.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0682fb60/dev-support/docker/docker/bin/ambaribuild.py
----------------------------------------------------------------------
diff --git a/dev-support/docker/docker/bin/ambaribuild.py b/dev-support/docker/docker/bin/ambaribuild.py
index 50db0e2..dfb7d60 100755
--- a/dev-support/docker/docker/bin/ambaribuild.py
+++ b/dev-support/docker/docker/bin/ambaribuild.py
@@ -47,9 +47,9 @@ def buildAmbari(stack_distribution):
 	return proc.wait()
 
 def install_ambari_server():
-	proc = subprocess.Popen("sudo yum install -y ambari-server-*.noarch.rpm",
+	proc = subprocess.Popen("sudo yum install -y ambari-server-*.x86_64.rpm",
 			shell=True,
-			cwd="/tmp/ambari/ambari-server/target/rpm/ambari-server/RPMS/noarch")
+			cwd="/tmp/ambari/ambari-server/target/rpm/ambari-server/RPMS/x86_64")
 	return proc.wait()
 
 def install_ambari_agent():


[12/12] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/50547c5e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/50547c5e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/50547c5e

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 50547c5ed6076a6fdd18830b179cd28411288496
Parents: 55342fc 9310ab7
Author: Nate Cole <nc...@hortonworks.com>
Authored: Tue Feb 23 09:06:59 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Tue Feb 23 09:06:59 2016 -0500

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog222.java       |  19 +++
 .../HAWQ/2.0.0/configuration/hawq-env.xml       |   2 +-
 .../PXF/3.0.0/package/scripts/params.py         |   1 +
 .../PXF/3.0.0/package/scripts/pxf_constants.py  |   3 +
 .../PXF/3.0.0/package/scripts/service_check.py  | 167 ++++++++++---------
 .../stacks/HDP/2.2/services/stack_advisor.py    |   6 +-
 .../stacks/HDP/2.3/services/stack_advisor.py    |   2 +
 .../services/HIVE/configuration/hive-site.xml   |   6 +
 .../stacks/HDPWIN/2.2/services/stack_advisor.py |  35 +++-
 .../server/upgrade/UpgradeCatalog222Test.java   |  80 ++++++---
 .../stacks/2.2/common/test_stack_advisor.py     |   4 +-
 .../stacks/2.3/common/test_stack_advisor.py     | 103 +++++++++++-
 .../alerts/definition_configs_controller.js     |  16 +-
 ambari-web/app/controllers/main/host.js         |   2 -
 .../controllers/main/service/info/summary.js    |   7 +-
 ambari-web/app/data/HDP2.3/site_properties.js   |   7 +
 ambari-web/app/data/host/categories.js          |  11 --
 .../mappers/alert_definition_summary_mapper.js  |  23 +++
 ambari-web/app/models/alerts/alert_config.js    |  13 --
 ambari-web/app/models/host_component.js         |  12 ++
 ambari-web/app/styles/alerts.less               |  29 ++--
 .../alerts/configs/alert_config_parameter.hbs   |  10 +-
 ambari-web/app/templates/main/menu_item.hbs     |   5 -
 .../service/info/summary/master_components.hbs  |   8 +
 ambari-web/app/views/main/menu.js               |   8 +-
 contrib/views/files/src/main/resources/view.xml |   8 +
 .../jobs/ResultsPaginationController.java       |   7 +-
 contrib/views/hive/src/main/resources/view.xml  |   9 +
 contrib/views/pig/src/main/resources/view.xml   |   9 +
 .../ambari/view/tez/utils/ProxyHelper.java      |   7 +-
 .../view/utils/hdfs/ConfigurationBuilder.java   |   6 +
 dev-support/docker/docker/bin/ambaribuild.py    |   4 +-
 32 files changed, 452 insertions(+), 177 deletions(-)
----------------------------------------------------------------------



[11/12] ambari git commit: AMBARI-15130. Upgrade alert definition for App Timeline Web UI alert.(vbrodetskyi)

Posted by nc...@apache.org.
AMBARI-15130. Upgrade alert definition for App Timeline Web UI alert.(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9310ab70
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9310ab70
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9310ab70

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 9310ab70ce178f6cd89d1be2be92081b26e2e832
Parents: 396b7da
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Tue Feb 23 14:15:58 2016 +0200
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Tue Feb 23 14:15:58 2016 +0200

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog222.java       | 19 +++++
 .../server/upgrade/UpgradeCatalog222Test.java   | 80 +++++++++++++++-----
 2 files changed, 79 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9310ab70/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
index 2d0b556..b8d47f9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
@@ -18,6 +18,8 @@
 
 package org.apache.ambari.server.upgrade;
 
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
 import com.google.inject.Inject;
 import com.google.inject.Injector;
 import org.apache.ambari.server.AmbariException;
@@ -160,10 +162,27 @@ public class UpgradeCatalog222 extends AbstractUpgradeCatalog {
       final AlertDefinitionEntity regionserverHealthSummaryDefinitionEntity = alertDefinitionDAO.findByName(
               clusterID, "regionservers_health_summary");
 
+      final AlertDefinitionEntity atsWebAlert = alertDefinitionDAO.findByName(
+              clusterID, "yarn_app_timeline_server_webui");
+
       if (regionserverHealthSummaryDefinitionEntity != null) {
         alertDefinitionDAO.remove(regionserverHealthSummaryDefinitionEntity);
       }
 
+      if (atsWebAlert != null) {
+        String source = atsWebAlert.getSource();
+        JsonObject sourceJson = new JsonParser().parse(source).getAsJsonObject();
+
+        JsonObject uriJson = sourceJson.get("uri").getAsJsonObject();
+        uriJson.remove("http");
+        uriJson.remove("https");
+        uriJson.addProperty("http", "{{yarn-site/yarn.timeline-service.webapp.address}}/ws/v1/timeline");
+        uriJson.addProperty("https", "{{yarn-site/yarn.timeline-service.webapp.https.address}}/ws/v1/timeline");
+
+        atsWebAlert.setSource(sourceJson.toString());
+        alertDefinitionDAO.merge(atsWebAlert);
+      }
+
     }
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/9310ab70/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
index 2b95cd9..8a7a679 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
@@ -19,26 +19,28 @@
 package org.apache.ambari.server.upgrade;
 
 
-import java.lang.reflect.Method;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import javax.persistence.EntityManager;
-
+import com.google.common.collect.Maps;
+import com.google.gson.Gson;
+import com.google.inject.AbstractModule;
+import com.google.inject.Binder;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Module;
+import com.google.inject.Provider;
+import com.google.inject.persist.PersistService;
+import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.AmbariManagementControllerImpl;
-import org.apache.ambari.server.controller.ConfigurationRequest;
-import org.apache.ambari.server.controller.ConfigurationResponse;
 import org.apache.ambari.server.controller.KerberosHelper;
 import org.apache.ambari.server.controller.MaintenanceStateHelper;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
 import org.apache.ambari.server.orm.dao.StackDAO;
+import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
 import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -53,16 +55,12 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.google.common.collect.Maps;
-import com.google.gson.Gson;
-import com.google.inject.AbstractModule;
-import com.google.inject.Binder;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.Module;
-import com.google.inject.Provider;
-import com.google.inject.persist.PersistService;
-import org.apache.ambari.server.AmbariException;
+import javax.persistence.EntityManager;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 import static org.easymock.EasyMock.anyObject;
 import static org.easymock.EasyMock.anyString;
@@ -148,6 +146,48 @@ public class UpgradeCatalog222Test {
   }
 
   @Test
+  public void testUpdateAlerts_ATSAlert() {
+    EasyMockSupport easyMockSupport = new EasyMockSupport();
+    final AmbariManagementController mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);
+    final Clusters mockClusters = easyMockSupport.createStrictMock(Clusters.class);
+    final Cluster mockClusterExpected = easyMockSupport.createNiceMock(Cluster.class);
+    final AlertDefinitionDAO mockAlertDefinitionDAO = easyMockSupport.createNiceMock(AlertDefinitionDAO.class);
+    final AlertDefinitionEntity mockATSWebAlert = easyMockSupport.createNiceMock(AlertDefinitionEntity.class);
+
+    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        bind(AmbariManagementController.class).toInstance(mockAmbariManagementController);
+        bind(Clusters.class).toInstance(mockClusters);
+        bind(EntityManager.class).toInstance(entityManager);
+        bind(AlertDefinitionDAO.class).toInstance(mockAlertDefinitionDAO);
+        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+        bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+      }
+    });
+
+    long clusterId = 1;
+
+    expect(mockAmbariManagementController.getClusters()).andReturn(mockClusters).once();
+    expect(mockClusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+      put("normal", mockClusterExpected);
+    }}).atLeastOnce();
+    expect(mockClusterExpected.getClusterId()).andReturn(clusterId).anyTimes();
+    expect(mockAlertDefinitionDAO.findByName(eq(clusterId), eq("yarn_app_timeline_server_webui")))
+            .andReturn(mockATSWebAlert).atLeastOnce();
+    expect(mockATSWebAlert.getSource()).andReturn("{\"uri\": {\n" +
+            "            \"http\": \"{{yarn-site/yarn.timeline-service.webapp.address}}/ws/v1/timeline\",\n" +
+            "            \"https\": \"{{yarn-site/yarn.timeline-service.webapp.https.address}}/ws/v1/timeline\" } }");
+
+    mockATSWebAlert.setSource("{\"uri\":{\"http\":\"{{yarn-site/yarn.timeline-service.webapp.address}}/ws/v1/timeline\",\"https\":\"{{yarn-site/yarn.timeline-service.webapp.https.address}}/ws/v1/timeline\"}}");
+    expectLastCall().once();
+
+    easyMockSupport.replayAll();
+    mockInjector.getInstance(UpgradeCatalog222.class).updateAlerts();
+    easyMockSupport.verifyAll();
+  }
+
+  @Test
   public void testHiveSiteUpdateConfigs() throws AmbariException {
     EasyMockSupport easyMockSupport = new EasyMockSupport();
     final AmbariManagementController mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);


[10/12] ambari git commit: AMBARI-15048. ambari should make sure it uses only leaf queue name in configs (aonishuk)

Posted by nc...@apache.org.
AMBARI-15048. ambari should make sure it uses only leaf queue name in configs (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/396b7da7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/396b7da7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/396b7da7

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 396b7da71b18a837416b67e6b8a29cde62265709
Parents: b1be75f
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Feb 23 12:42:06 2016 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Tue Feb 23 12:42:06 2016 +0200

----------------------------------------------------------------------
 .../stacks/HDP/2.2/services/stack_advisor.py    |  6 ++--
 .../services/HIVE/configuration/hive-site.xml   |  6 ++++
 .../stacks/HDPWIN/2.2/services/stack_advisor.py | 35 +++++++++++++++-----
 .../stacks/2.2/common/test_stack_advisor.py     |  4 +--
 4 files changed, 38 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/396b7da7/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
index cd25d77..6f0ee59 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
@@ -380,7 +380,7 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
     # Interactive Queues property attributes
     putHiveServerPropertyAttribute = self.putPropertyAttribute(configurations, "hiveserver2-site")
     toProcessQueues = yarn_queues.split(",")
-    leafQueues = []
+    leafQueueNames = set() # Remove duplicates
     while len(toProcessQueues) > 0:
       queue = toProcessQueues.pop()
       queueKey = "yarn.scheduler.capacity.root." + queue + ".queues"
@@ -391,7 +391,9 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
           toProcessQueues.append(queue + "." + subQueue)
       else:
         # This is a leaf queue
-        leafQueues.append({"label": str(queue) + " queue", "value": queue})
+        queueName = queue.split(".")[-1] # Fully qualified queue name does not work, we should use only leaf name
+        leafQueueNames.add(queueName)
+    leafQueues = [{"label": str(queueName) + " queue", "value": queueName} for queueName in leafQueueNames]
     leafQueues = sorted(leafQueues, key=lambda q:q['value'])
     putHiveSitePropertyAttribute("hive.server2.tez.default.queues", "entries", leafQueues)
     putHiveSiteProperty("hive.server2.tez.default.queues", ",".join([leafQueue['value'] for leafQueue in leafQueues]))

http://git-wip-us.apache.org/repos/asf/ambari/blob/396b7da7/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/hive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/hive-site.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/hive-site.xml
index be7c943..e2389b3 100644
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/hive-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/hive-site.xml
@@ -1309,6 +1309,12 @@ limitations under the License.
       </entries>
       <selection-cardinality>1+</selection-cardinality>
     </value-attributes>
+    <depends-on>
+      <property>
+        <type>capacity-scheduler</type>
+        <name>yarn.scheduler.capacity.root.queues</name>
+      </property>
+    </depends-on>
   </property>
 
   <property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/396b7da7/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/stack_advisor.py
index 1ed4ce0..d560146 100644
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/stack_advisor.py
@@ -375,17 +375,34 @@ class HDPWIN22StackAdvisor(HDPWIN21StackAdvisor):
     putHiveSiteProperty("hive.server2.enable.doAs", "true")
 
     yarn_queues = "default"
-    if "capacity-scheduler" in configurations and \
-      "yarn.scheduler.capacity.root.queues" in configurations["capacity-scheduler"]["properties"]:
-      yarn_queues = str(configurations["capacity-scheduler"]["properties"]["yarn.scheduler.capacity.root.queues"])
-    putHiveSiteProperty("hive.server2.tez.default.queues", yarn_queues)
-
+    capacitySchedulerProperties = {}
+    if "capacity-scheduler" in services['configurations'] and "capacity-scheduler" in services['configurations']["capacity-scheduler"]["properties"]:
+      properties = str(services['configurations']["capacity-scheduler"]["properties"]["capacity-scheduler"]).split('\n')
+      for property in properties:
+        key,sep,value = property.partition("=")
+        capacitySchedulerProperties[key] = value
+    if "yarn.scheduler.capacity.root.queues" in capacitySchedulerProperties:
+      yarn_queues = str(capacitySchedulerProperties["yarn.scheduler.capacity.root.queues"])
     # Interactive Queues property attributes
     putHiveServerPropertyAttribute = self.putPropertyAttribute(configurations, "hiveserver2-site")
-    entries = []
-    for queue in yarn_queues.split(","):
-      entries.append({"label": str(queue) + " queue", "value": queue})
-    putHiveSitePropertyAttribute("hive.server2.tez.default.queues", "entries", entries)
+    toProcessQueues = yarn_queues.split(",")
+    leafQueueNames = set() # Remove duplicates
+    while len(toProcessQueues) > 0:
+      queue = toProcessQueues.pop()
+      queueKey = "yarn.scheduler.capacity.root." + queue + ".queues"
+      if queueKey in capacitySchedulerProperties:
+        # This is a parent queue - need to add children
+        subQueues = capacitySchedulerProperties[queueKey].split(",")
+        for subQueue in subQueues:
+          toProcessQueues.append(queue + "." + subQueue)
+      else:
+        # This is a leaf queue
+        queueName = queue.split(".")[-1] # Fully qualified queue name does not work, we should use only leaf name
+        leafQueueNames.add(queueName)
+    leafQueues = [{"label": str(queueName) + " queue", "value": queueName} for queueName in leafQueueNames]
+    leafQueues = sorted(leafQueues, key=lambda q:q['value'])
+    putHiveSitePropertyAttribute("hive.server2.tez.default.queues", "entries", leafQueues)
+    putHiveSiteProperty("hive.server2.tez.default.queues", ",".join([leafQueue['value'] for leafQueue in leafQueues]))
 
     # Security
     putHiveEnvProperty("hive_security_authorization", "None")

http://git-wip-us.apache.org/repos/asf/ambari/blob/396b7da7/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
index d230030..d95e6c7 100644
--- a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
@@ -1402,9 +1402,9 @@ class TestHDP22StackAdvisor(TestCase):
                                    "yarn.scheduler.capacity.root.default.user-limit-factor=1\n"
                                    "yarn.scheduler.capacity.root.queues=default"}
 
-    expected['hive-site']['properties']['hive.server2.tez.default.queues'] = 'default.a.a1,default.a.a2,default.b'
+    expected['hive-site']['properties']['hive.server2.tez.default.queues'] = 'a1,a2,b'
     expected['hive-site']['property_attributes']['hive.server2.tez.default.queues'] = {
-           'entries': [{'value': 'default.a.a1', 'label': 'default.a.a1 queue'}, {'value': 'default.a.a2', 'label': 'default.a.a2 queue'}, {'value': 'default.b', 'label': 'default.b queue'}]
+           'entries': [{'value': 'a1', 'label': 'a1 queue'}, {'value': 'a2', 'label': 'a2 queue'}, {'value': 'b', 'label': 'b queue'}]
           }
     self.stackAdvisor.recommendHIVEConfigurations(configurations, clusterData, services, hosts)
     self.assertEquals(configurations['hive-site']['property_attributes']['hive.server2.tez.default.queues'], expected['hive-site']['property_attributes']['hive.server2.tez.default.queues'])