You are viewing a plain text version of this content; the canonical (hyperlinked) version is available in the mailing-list archive.
Posted to commits@ambari.apache.org by nc...@apache.org on 2015/12/01 19:19:51 UTC

[01/50] ambari git commit: AMBARI-14091. Postgres DB Test connection for Ranger DBA user does not work. (jaimin)

Repository: ambari
Updated Branches:
  refs/heads/branch-dev-patch-upgrade 47a1bcfea -> 6ae63efd6


AMBARI-14091. Postgres DB Test connection for Ranger DBA user does not work. (jaimin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/db2ca773
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/db2ca773
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/db2ca773

Branch: refs/heads/branch-dev-patch-upgrade
Commit: db2ca773b5bd73f5aa1dee956d586c691c360ba1
Parents: b34cf6d
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Thu Nov 26 20:21:41 2015 -0800
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Thu Nov 26 20:24:00 2015 -0800

----------------------------------------------------------------------
 .../src/main/resources/stacks/HDP/2.3/services/stack_advisor.py    | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/db2ca773/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
index 9fb9e24..419fb3a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
@@ -442,7 +442,7 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
         ranger_db_privelege_url_dict = {
           'MYSQL': {'ranger_privelege_user_jdbc_url': 'jdbc:mysql://' + rangerDbHost},
           'ORACLE': {'ranger_privelege_user_jdbc_url': 'jdbc:oracle:thin:@//' + rangerDbHost + ':1521'},
-          'POSTGRES': {'ranger_privelege_user_jdbc_url': 'jdbc:postgresql://' + rangerDbHost + ':5432'},
+          'POSTGRES': {'ranger_privelege_user_jdbc_url': 'jdbc:postgresql://' + rangerDbHost + ':5432/postgres'},
           'MSSQL': {'ranger_privelege_user_jdbc_url': 'jdbc:sqlserver://' + rangerDbHost + ';'},
           'SQLA': {'ranger_privelege_user_jdbc_url': 'jdbc:sqlanywhere:host=' + rangerDbHost + ';'}
         }


[27/50] ambari git commit: AMBARI-14113 Check memory consumption of hbase shell jvm launched to verify data model (dsen)

Posted by nc...@apache.org.
AMBARI-14113 Check memory consumption of hbase shell jvm launched to verify data model (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/edc8d14e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/edc8d14e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/edc8d14e

Branch: refs/heads/branch-dev-patch-upgrade
Commit: edc8d14ec38ad5c0c7a10c0576da60e60a670fb4
Parents: fd1181f
Author: Dmytro Sen <ds...@apache.org>
Authored: Mon Nov 30 21:12:37 2015 +0200
Committer: Dmytro Sen <ds...@apache.org>
Committed: Mon Nov 30 21:12:37 2015 +0200

----------------------------------------------------------------------
 .../org/apache/ambari/server/upgrade/UpgradeCatalog213.java    | 6 ++++++
 .../AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml       | 4 ++--
 .../apache/ambari/server/upgrade/UpgradeCatalog213Test.java    | 6 ++++--
 3 files changed, 12 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/edc8d14e/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
index a070935..3b61fa1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
@@ -1187,6 +1187,12 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
     regSearch = "\\{\\{regionserver_heapsize\\}\\}";
     replacement = "{{regionserver_heapsize}}m";
     content = content.replaceAll(regSearch, replacement);
+    regSearch = "export HBASE_HEAPSIZE=";
+    replacement = "#export HBASE_HEAPSIZE=";
+    content = content.replaceAll(regSearch, replacement);
+    content += "\n" +
+      "# The maximum amount of heap to use for hbase shell.\n" +
+      "export HBASE_SHELL_OPTS=\"-Xmx256m\"\n";
     return content;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/edc8d14e/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
index b36275b..33bd713 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
@@ -159,8 +159,8 @@ else
   export HBASE_CLASSPATH=${HBASE_CLASSPATH}
 fi
 
-# The maximum amount of heap to use, in MB. Default is 1000.
-export HBASE_HEAPSIZE={{hbase_heapsize}}m
+# The maximum amount of heap to use for hbase shell.
+export HBASE_SHELL_OPTS="-Xmx256m"
 
 # Extra Java runtime options.
 # Below are what we set by default. May only work with SUN JVM.

http://git-wip-us.apache.org/repos/asf/ambari/blob/edc8d14e/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
index d8e7267..7cd835e 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
@@ -608,7 +608,7 @@ public class UpgradeCatalog213Test {
     String expectedContent = "export HBASE_CLASSPATH=${HBASE_CLASSPATH}\n" +
       "\n" +
       "# The maximum amount of heap to use, in MB. Default is 1000.\n" +
-      "export HBASE_HEAPSIZE={{hbase_heapsize}}m\n" +
+      "#export HBASE_HEAPSIZE={{hbase_heapsize}}m\n" +
       "\n" +
       "{% if java_version &lt; 8 %}\n" +
       "export HBASE_MASTER_OPTS=\" -XX:PermSize=64m -XX:MaxPermSize={{hbase_master_maxperm_size}}m -Xms{{hbase_heapsize}}m -Xmx{{hbase_heapsize}}m -Xmn{{hbase_master_xmn_size}}m -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly\"\n" +
@@ -616,7 +616,9 @@ public class UpgradeCatalog213Test {
       "{% else %}\n" +
       "export HBASE_MASTER_OPTS=\" -Xms{{hbase_heapsize}}m -Xmx{{hbase_heapsize}}m -Xmn{{hbase_master_xmn_size}}m -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly\"\n" +
       "export HBASE_REGIONSERVER_OPTS=\" -Xmn{{regionserver_xmn_size}}m -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly -Xms{{regionserver_heapsize}}m -Xmx{{regionserver_heapsize}}m\"\n" +
-      "{% endif %}\n";
+      "{% endif %}\n\n" +
+      "# The maximum amount of heap to use for hbase shell.\n" +
+      "export HBASE_SHELL_OPTS=\"-Xmx256m\"\n";
     String result = (String) updateAmsHbaseEnvContent.invoke(upgradeCatalog213, oldContent);
     Assert.assertEquals(expectedContent, result);
   }


[43/50] ambari git commit: AMBARI-14126 Override icon doesn't show up when group is not default. (ababiichuk)

Posted by nc...@apache.org.
AMBARI-14126 Override icon doesn't show up when group is not default. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5cc99b22
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5cc99b22
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5cc99b22

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 5cc99b224c8870815f978b7bc631275843c3bb06
Parents: 95a71a6
Author: aBabiichuk <ab...@cybervisiontech.com>
Authored: Tue Dec 1 13:20:45 2015 +0200
Committer: aBabiichuk <ab...@cybervisiontech.com>
Committed: Tue Dec 1 13:26:28 2015 +0200

----------------------------------------------------------------------
 ambari-web/app/views/common/configs/controls_view.js | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/5cc99b22/ambari-web/app/views/common/configs/controls_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/configs/controls_view.js b/ambari-web/app/views/common/configs/controls_view.js
index 6232a27..78fd950 100644
--- a/ambari-web/app/views/common/configs/controls_view.js
+++ b/ambari-web/app/views/common/configs/controls_view.js
@@ -26,20 +26,20 @@ App.ControlsView = Ember.View.extend({
 	serviceConfigProperty: null,
 
 	showActions: function() {
-		return App.isAccessible('ADMIN') && this.get('serviceConfigProperty.isEditable') && this.get('serviceConfigProperty.isRequiredByAgent') && !this.get('serviceConfigProperty.isComparison');
+		return App.isAccessible('ADMIN') && this.get('serviceConfigProperty.isRequiredByAgent') && !this.get('serviceConfigProperty.isComparison');
 	}.property('serviceConfigProperty.isEditable', 'serviceConfigProperty.isRequiredByAgent', 'serviceConfigProperty.isComparison'),
 
 	showSwitchToGroup: Em.computed.and('!serviceConfigProperty.isEditable', 'serviceConfigProperty.group'),
 
 	showIsFinal: Em.computed.alias('serviceConfigProperty.supportsFinal'),
 
-	showRemove: Em.computed.and('showActions', 'serviceConfigProperty.isRemovable'),
+	showRemove: Em.computed.and('showActions', 'serviceConfigProperty.isEditable', 'serviceConfigProperty.isRemovable'),
 
 	showOverride: Em.computed.and('showActions', 'serviceConfigProperty.isPropertyOverridable'),
 
-	showUndo: Em.computed.and('showActions', '!serviceConfigProperty.cantBeUndone', 'serviceConfigProperty.isNotDefaultValue'),
+	showUndo: Em.computed.and('showActions', 'serviceConfigProperty.isEditable', '!serviceConfigProperty.cantBeUndone', 'serviceConfigProperty.isNotDefaultValue'),
 
-	showSetRecommended: Em.computed.and('showActions', 'serviceConfigProperty.recommendedValueExists')
+	showSetRecommended: Em.computed.and('showActions', 'serviceConfigProperty.isEditable', 'serviceConfigProperty.recommendedValueExists')
 
 });
 


[05/50] ambari git commit: AMBARI-14095. Upgrade: second click shows diff results

Posted by nc...@apache.org.
AMBARI-14095. Upgrade: second click shows diff results


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b470e3e7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b470e3e7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b470e3e7

Branch: refs/heads/branch-dev-patch-upgrade
Commit: b470e3e7d9cd78b747f6cc08e100e9a2b8b709fc
Parents: 7312dbf
Author: Alex Antonenko <hi...@gmail.com>
Authored: Fri Nov 27 14:07:08 2015 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Fri Nov 27 14:17:36 2015 +0200

----------------------------------------------------------------------
 ambari-web/app/assets/test/tests.js             |   1 +
 .../app/controllers/main/admin/kerberos.js      |   8 +-
 .../main/admin/stack_and_upgrade_controller.js  |  42 ++-
 .../modal_popups/cluster_check_dialog.hbs       |  24 +-
 .../common/modal_popups/cluster_check_popup.js  | 107 ++------
 .../admin/stack_and_upgrade_controller_test.js  |   2 +-
 .../modal_popups/cluster_check_popup_test.js    | 271 +++++++++++++++++++
 7 files changed, 352 insertions(+), 103 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b470e3e7/ambari-web/app/assets/test/tests.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/test/tests.js b/ambari-web/app/assets/test/tests.js
index f8d59f1..23460b7 100644
--- a/ambari-web/app/assets/test/tests.js
+++ b/ambari-web/app/assets/test/tests.js
@@ -202,6 +202,7 @@ var files = [
   'test/views/common/widget/graph_widget_view_test',
   'test/views/common/widget/number_widget_view_test',
   'test/views/common/widget/gauge_widget_view_test',
+  'test/views/common/modal_popups/cluster_check_popup_test',
   'test/views/common/modal_popups/hosts_table_list_popup_test',
   'test/views/common/modal_popups/dependent_configs_list_popup_test',
   'test/views/main/admin_test',

http://git-wip-us.apache.org/repos/asf/ambari/blob/b470e3e7/ambari-web/app/controllers/main/admin/kerberos.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/kerberos.js b/ambari-web/app/controllers/main/admin/kerberos.js
index 57ee8c1..ac2a332 100644
--- a/ambari-web/app/controllers/main/admin/kerberos.js
+++ b/ambari-web/app/controllers/main/admin/kerberos.js
@@ -221,10 +221,16 @@ App.MainAdminKerberosController = App.KerberosWizardStep4Controller.extend({
   runSecurityCheckSuccess: function (data, opt, params) {
     //TODO correct check
     if (data.items.someProperty('UpgradeChecks.status', "FAIL")) {
+      var hasFails = data.items.someProperty('UpgradeChecks.status', 'FAIL');
       var header = Em.I18n.t('popup.clusterCheck.Security.header').format(params.label);
       var title = Em.I18n.t('popup.clusterCheck.Security.title');
       var alert = Em.I18n.t('popup.clusterCheck.Security.alert');
-      App.showClusterCheckPopup(data, header, title, alert);
+      App.showClusterCheckPopup(data, {
+        header: header,
+        failTitle: title,
+        failAlert: alert,
+        noCallbackCondition: hasFails
+      });
     } else {
       this.startKerberosWizard();
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/b470e3e7/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
index f331540..7094d67 100644
--- a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
+++ b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
@@ -720,8 +720,10 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
             configsMergeWarning = data.items.findProperty('UpgradeChecks.id', "CONFIG_MERGE"),
             configs = [];
           if (configsMergeWarning && Em.get(configsMergeWarning, 'UpgradeChecks.status') === 'WARNING') {
-            data.items = data.items.rejectProperty('UpgradeChecks.id', 'CONFIG_MERGE');
-            var configsMergeCheckData = Em.get(configsMergeWarning, 'UpgradeChecks.failed_detail');
+            var popupData = {
+                items: data.items.rejectProperty('UpgradeChecks.id', 'CONFIG_MERGE')
+              },
+              configsMergeCheckData = Em.get(configsMergeWarning, 'UpgradeChecks.failed_detail');
             if (configsMergeCheckData) {
               configs = configsMergeCheckData.map(function (item) {
                 var isDeprecated = Em.isNone(item.new_stack_value),
@@ -738,12 +740,21 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
               });
             }
           }
-          App.showPreUpgradeCheckPopup(data, header, failTitle, failAlert, warningTitle, warningAlert, function () {
-            self.runPreUpgradeCheckOnly.call(self, {
-              value: version.get('repositoryVersion'),
-              label: version.get('displayName'),
-              type: event.context.get('type')
-            });
+          App.showClusterCheckPopup(popupData, {
+            header: header,
+            failTitle: failTitle,
+            failAlert: failAlert,
+            warningTitle: warningTitle,
+            warningAlert: warningAlert,
+            primary: Em.I18n.t('admin.stackVersions.version.upgrade.upgradeOptions.preCheck.rerun'),
+            secondary: Em.I18n.t('common.cancel'),
+            callback: function () {
+              self.runPreUpgradeCheckOnly.call(self, {
+                value: version.get('repositoryVersion'),
+                label: version.get('displayName'),
+                type: event.context.get('type')
+              });
+            }
           }, configs, version.get('displayName'));
         }
       }),
@@ -958,7 +969,8 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
     var self = this;
     if (data.items.someProperty('UpgradeChecks.status', 'FAIL') || data.items.someProperty('UpgradeChecks.status', 'WARNING')) {
       this.set('requestInProgress', false);
-      var header = Em.I18n.t('popup.clusterCheck.Upgrade.header').format(params.label),
+      var hasFails = data.items.someProperty('UpgradeChecks.status', 'FAIL'),
+        header = Em.I18n.t('popup.clusterCheck.Upgrade.header').format(params.label),
         failTitle = Em.I18n.t('popup.clusterCheck.Upgrade.fail.title'),
         failAlert = new Em.Handlebars.SafeString(Em.I18n.t('popup.clusterCheck.Upgrade.fail.alert')),
         warningTitle = Em.I18n.t('popup.clusterCheck.Upgrade.warning.title'),
@@ -984,8 +996,16 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
           });
         }
       }
-      App.showClusterCheckPopup(data, header, failTitle, failAlert, warningTitle, warningAlert, function () {
-        self.upgrade(params);
+      App.showClusterCheckPopup(data, {
+        header: header,
+        failTitle: failTitle,
+        failAlert: failAlert,
+        warningTitle: warningTitle,
+        warningAlert: warningAlert,
+        noCallbackCondition: hasFails,
+        callback: function () {
+          self.upgrade(params);
+        }
       }, configs, params.label);
     } else {
       this.upgrade(params);

http://git-wip-us.apache.org/repos/asf/ambari/blob/b470e3e7/ambari-web/app/templates/common/modal_popups/cluster_check_dialog.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/modal_popups/cluster_check_dialog.hbs b/ambari-web/app/templates/common/modal_popups/cluster_check_dialog.hbs
index 8767d70..8fd2746 100644
--- a/ambari-web/app/templates/common/modal_popups/cluster_check_dialog.hbs
+++ b/ambari-web/app/templates/common/modal_popups/cluster_check_dialog.hbs
@@ -20,10 +20,14 @@
     <i class="icon-ok"></i>&nbsp;<span>{{t admin.stackVersions.version.upgrade.upgradeOptions.preCheck.allPassed.msg}}</span>
   {{/if}}
   {{#if view.fails.length}}
-    <h4>{{view.failTitle}}</h4>
-    <div class="alert alert-warning">
-      {{view.failAlert}}
-    </div>
+    {{#if view.failTitle}}
+      <h4>{{view.failTitle}}</h4>
+    {{/if}}
+    {{#if view.failAlert}}
+      <div class="alert alert-warning">
+        {{view.failAlert}}
+      </div>
+    {{/if}}
     <div class="limited-height-2">
       {{#each item in view.fails}}
         <i class="icon-remove"></i>&nbsp;<span>{{item.UpgradeChecks.check}}</span>
@@ -32,10 +36,14 @@
     </div>
   {{/if}}
   {{#if view.warnings.length}}
-    <h4>{{view.warningTitle}}</h4>
-    <div class="alert alert-warning">
-      {{view.warningAlert}}
-    </div>
+    {{#if view.warningTitle}}
+      <h4>{{view.warningTitle}}</h4>
+    {{/if}}
+    {{#if view.warningAlert}}
+      <div class="alert alert-warning">
+        {{view.warningAlert}}
+      </div>
+    {{/if}}
     <div class="limited-height-2">
       {{#each item in view.warnings}}
         <i class="icon-warning-sign"></i>&nbsp;<span>{{item.UpgradeChecks.check}}</span>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b470e3e7/ambari-web/app/views/common/modal_popups/cluster_check_popup.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/modal_popups/cluster_check_popup.js b/ambari-web/app/views/common/modal_popups/cluster_check_popup.js
index 681c807..54f20ba 100644
--- a/ambari-web/app/views/common/modal_popups/cluster_check_popup.js
+++ b/ambari-web/app/views/common/modal_popups/cluster_check_popup.js
@@ -22,30 +22,33 @@ var App = require('app');
  * popup to display requirements that are not met
  * for current action
  * @param data
- * @param header
- * @param failTitle
- * @param failAlert
- * @param warningTitle
- * @param warningAlert
- * @param callback
+ * @param popup
  * @param configs
  * @param upgradeVersion
  * @returns {*|void}
  */
-App.showClusterCheckPopup = function (data, header, failTitle, failAlert, warningTitle, warningAlert, callback, configs, upgradeVersion) {
+App.showClusterCheckPopup = function (data, popup, configs, upgradeVersion) {
   var fails = data.items.filterProperty('UpgradeChecks.status', 'FAIL'),
     warnings = data.items.filterProperty('UpgradeChecks.status', 'WARNING'),
     hasConfigsMergeConflicts = !!(configs && configs.length),
-    popupBody = {
-      failTitle: failTitle,
-      failAlert: failAlert,
-      warningTitle: warningTitle,
-      warningAlert: warningAlert,
-      templateName: require('templates/common/modal_popups/cluster_check_dialog'),
-      fails: fails,
-      warnings: warnings,
-      hasConfigsMergeConflicts: hasConfigsMergeConflicts
-    };
+    primary,
+    secondary,
+    popupBody;
+  popup = popup || {};
+  primary = Em.isNone(popup.primary) ?
+    (fails.length ? Em.I18n.t('common.dismiss') : Em.I18n.t('common.proceedAnyway')) : popup.primary;
+  secondary = Em.isNone(popup.secondary) ? (fails.length ? false : Em.I18n.t('common.cancel')) : popup.secondary;
+  popupBody = {
+    failTitle: popup.failTitle,
+    failAlert: popup.failAlert,
+    warningTitle: popup.warningTitle,
+    warningAlert: popup.warningAlert,
+    templateName: require('templates/common/modal_popups/cluster_check_dialog'),
+    fails: fails,
+    warnings: warnings,
+    hasConfigsMergeConflicts: hasConfigsMergeConflicts,
+    isAllPassed: !fails.length && !warnings.length && !hasConfigsMergeConflicts
+  };
   if (hasConfigsMergeConflicts) {
     popupBody.configsMergeTable = Em.View.extend({
       templateName: require('templates/main/admin/stack_upgrade/upgrade_configs_merge_table'),
@@ -58,75 +61,15 @@ App.showClusterCheckPopup = function (data, header, failTitle, failAlert, warnin
     });
   }
   return App.ModalPopup.show({
-    primary: fails.length ? Em.I18n.t('common.dismiss') : Em.I18n.t('common.proceedAnyway'),
-    secondary: fails.length ? false : Em.I18n.t('common.cancel'),
-    header: header,
-    classNames: ['cluster-check-popup'],
-    bodyClass: Em.View.extend(popupBody),
-    onPrimary: function () {
-      if (!fails.length && callback) {
-        callback();
-      }
-      this._super();
-    },
-    didInsertElement: function () {
-      this._super();
-      this.fitHeight();
-    }
-  });
-};
-
-
-/**
- * popup to display requirements that are not met
- * for current action
- * @param data
- * @param header
- * @param failTitle
- * @param failAlert
- * @param warningTitle
- * @param warningAlert
- * @param callback
- * @param configs
- * @param upgradeVersion
- * @returns {*|void}
- */
-App.showPreUpgradeCheckPopup = function (data, header, failTitle, failAlert, warningTitle, warningAlert, callback, configs, upgradeVersion) {
-  var fails = data.items.filterProperty('UpgradeChecks.status', 'FAIL'),
-    warnings = data.items.filterProperty('UpgradeChecks.status', 'WARNING'),
-    hasConfigsMergeConflicts = !!(configs && configs.length),
-    popupBody = {
-      failTitle: failTitle,
-      failAlert: failAlert,
-      warningTitle: warningTitle,
-      warningAlert: warningAlert,
-      templateName: require('templates/common/modal_popups/cluster_check_dialog'),
-      fails: fails,
-      warnings: warnings,
-      hasConfigsMergeConflicts: hasConfigsMergeConflicts,
-      isAllPassed: !fails.length && !warnings.length && !hasConfigsMergeConflicts.length
-    };
-  if (hasConfigsMergeConflicts) {
-    popupBody.configsMergeTable = Em.View.extend({
-      templateName: require('templates/main/admin/stack_upgrade/upgrade_configs_merge_table'),
-      configs: configs,
-      didInsertElement: function () {
-        App.tooltip($('.recommended-value'), {
-          title: upgradeVersion
-        });
-      }
-    });
-  }
-  return App.ModalPopup.show({
-    primary: Em.I18n.t('admin.stackVersions.version.upgrade.upgradeOptions.preCheck.rerun'),
-    secondary: Em.I18n.t('common.cancel'),
-    header: header,
+    primary: primary,
+    secondary: secondary,
+    header: popup.header,
     classNames: ['cluster-check-popup'],
     bodyClass: Em.View.extend(popupBody),
     onPrimary: function () {
       this._super();
-      if (callback) {
-        callback();
+      if (!popup.noCallbackCondition && popup.callback) {
+        popup.callback();
       }
     },
     didInsertElement: function () {

http://git-wip-us.apache.org/repos/asf/ambari/blob/b470e3e7/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js b/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
index 65739cd..b9c7cb7 100644
--- a/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
+++ b/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
@@ -454,7 +454,7 @@ describe('App.MainAdminStackAndUpgradeController', function() {
         expect(controller.upgrade.callCount).to.equal(item.upgradeCalledCount);
         expect(App.showClusterCheckPopup.callCount).to.equal(item.showClusterCheckPopupCalledCount);
         if (item.check.id == 'CONFIG_MERGE') {
-          expect(App.showClusterCheckPopup.firstCall.args[7]).to.eql(item.configs);
+          expect(App.showClusterCheckPopup.firstCall.args[2]).to.eql(item.configs);
         }
       });
     });

http://git-wip-us.apache.org/repos/asf/ambari/blob/b470e3e7/ambari-web/test/views/common/modal_popups/cluster_check_popup_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/common/modal_popups/cluster_check_popup_test.js b/ambari-web/test/views/common/modal_popups/cluster_check_popup_test.js
new file mode 100644
index 0000000..7f02730
--- /dev/null
+++ b/ambari-web/test/views/common/modal_popups/cluster_check_popup_test.js
@@ -0,0 +1,271 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var App = require('app');
+require('views/common/modal_popups/cluster_check_popup');
+
+describe('App.showClusterCheckPopup', function () {
+
+  var isCallbackExecuted,
+    callback = function () {
+      isCallbackExecuted = true;
+    },
+    cases = [
+      {
+        inputData: {
+          data: {
+            items: [
+              {
+                UpgradeChecks: {
+                  id: 'p0',
+                  status: 'PASS'
+                }
+              },
+              {
+                UpgradeChecks: {
+                  id: 'p1',
+                  status: 'PASS'
+                }
+              }
+            ]
+          }
+        },
+        result: {
+          primary: Em.I18n.t('common.proceedAnyway'),
+          secondary: Em.I18n.t('common.cancel'),
+          header: '&nbsp;'
+        },
+        bodyResult: {
+          failTitle: undefined,
+          failAlert: undefined,
+          warningTitle: undefined,
+          warningAlert: undefined,
+          fails: [],
+          warnings: [],
+          hasConfigsMergeConflicts: false,
+          isAllPassed: true
+        },
+        isCallbackExecuted: false,
+        title: 'no fails, no warnings, no popup customization'
+      },
+      {
+        inputData: {
+          data: {
+            items: [
+              {
+                UpgradeChecks: {
+                  id: 'w0',
+                  status: 'WARNING'
+                }
+              },
+              {
+                UpgradeChecks: {
+                  id: 'w1',
+                  status: 'WARNING'
+                }
+              }
+            ]
+          },
+          popup: {
+            header: 'checks',
+            failTitle: 'fail',
+            failAlert: 'something has failed',
+            warningTitle: 'warning',
+            warningAlert: 'something is not good',
+            callback: callback
+          }
+        },
+        result: {
+          primary: Em.I18n.t('common.proceedAnyway'),
+          secondary: Em.I18n.t('common.cancel'),
+          header: 'checks'
+        },
+        bodyResult: {
+          failTitle: 'fail',
+          failAlert: 'something has failed',
+          warningTitle: 'warning',
+          warningAlert: 'something is not good',
+          fails: [],
+          warnings: [
+            {
+              UpgradeChecks: {
+                id: 'w0',
+                status: 'WARNING'
+              }
+            },
+            {
+              UpgradeChecks: {
+                id: 'w1',
+                status: 'WARNING'
+              }
+            }
+          ],
+          hasConfigsMergeConflicts: false,
+          isAllPassed: false
+        },
+        isCallbackExecuted: true,
+        title: 'no fails, default buttons, callback executed'
+      },
+      {
+        inputData: {
+          data: {
+            items: [
+              {
+                UpgradeChecks: {
+                  id: 'f0',
+                  status: 'FAIL'
+                }
+              },
+              {
+                UpgradeChecks: {
+                  id: 'f1',
+                  status: 'FAIL'
+                }
+              }
+            ]
+          },
+          popup: {
+            callback: callback,
+            noCallbackCondition: true
+          }
+        },
+        result: {
+          primary: Em.I18n.t('common.dismiss'),
+          secondary: false,
+          header: '&nbsp;'
+        },
+        bodyResult: {
+          failTitle: undefined,
+          failAlert: undefined,
+          warningTitle: undefined,
+          warningAlert: undefined,
+          fails: [
+            {
+              UpgradeChecks: {
+                id: 'f0',
+                status: 'FAIL'
+              }
+            },
+            {
+              UpgradeChecks: {
+                id: 'f1',
+                status: 'FAIL'
+              }
+            }
+          ],
+          warnings: [],
+          hasConfigsMergeConflicts: false,
+          isAllPassed: false
+        },
+        isCallbackExecuted: false,
+        title: 'fails detected, default buttons, callback not executed'
+      },
+      {
+        inputData: {
+          data: {
+            items: [
+              {
+                UpgradeChecks: {
+                  id: 'p0',
+                  status: 'PASS'
+                }
+              },
+              {
+                UpgradeChecks: {
+                  id: 'p1',
+                  status: 'PASS'
+                }
+              }
+            ]
+          },
+          popup: {
+            primary: 'ok',
+            secondary: 'cancel'
+          },
+          configs: [
+            {
+              name: 'c0'
+            },
+            {
+              name: 'c1'
+            }
+          ],
+          upgradeVersion: 'HDP-2.3.0.0'
+        },
+        result: {
+          primary: 'ok',
+          secondary: 'cancel',
+          header: '&nbsp;'
+        },
+        bodyResult: {
+          failTitle: undefined,
+          failAlert: undefined,
+          warningTitle: undefined,
+          warningAlert: undefined,
+          fails: [],
+          warnings: [],
+          hasConfigsMergeConflicts: true,
+          isAllPassed: false
+        },
+        configsResult: [
+          {
+            name: 'c0'
+          },
+          {
+            name: 'c1'
+          }
+        ],
+        isCallbackExecuted: false,
+        title: 'configs merge conflicts detected, custom buttons'
+      }
+    ];
+
+  beforeEach(function () {
+    isCallbackExecuted = false;
+    sinon.stub(App, 'tooltip', Em.K);
+  });
+
+  afterEach(function () {
+    App.tooltip.restore();
+  });
+
+  cases.forEach(function (item) {
+    it(item.title, function () {
+      var popup = App.showClusterCheckPopup(item.inputData.data, item.inputData.popup, item.inputData.configs, item.inputData.upgradeVersion),
+        popupBody = popup.bodyClass.create();
+      popup.onPrimary();
+      Em.keys(item.result).forEach(function (key) {
+        expect(popup[key]).to.equal(item.result[key]);
+      });
+      Em.keys(item.bodyResult).forEach(function (key) {
+        expect(popupBody[key]).to.eql(item.bodyResult[key]);
+      });
+      expect(isCallbackExecuted).to.equal(item.isCallbackExecuted);
+      if (item.bodyResult.hasConfigsMergeConflicts) {
+        var configsMergeTable = popupBody.configsMergeTable.create();
+        configsMergeTable.didInsertElement();
+        expect(configsMergeTable.configs).to.eql(item.configsResult);
+        expect(App.tooltip.calledOnce).to.be.true;
+        expect(App.tooltip.firstCall.args[1].title).to.equal(item.inputData.upgradeVersion);
+      } else {
+        expect(App.tooltip.calledOnce).to.be.false;
+      }
+    });
+  });
+
+});


[40/50] ambari git commit: AMBARI-14118 All Kerberos-related fields are to be made read-only in the service configuration screens. (atkach)

Posted by nc...@apache.org.
AMBARI-14118 All Kerberos-related fields are to be made read-only in the service configuration screens. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6c945804
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6c945804
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6c945804

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 6c945804b5971eae9158ebb98d1a044d47f910f6
Parents: 878c61d
Author: Andrii Tkach <at...@hortonworks.com>
Authored: Mon Nov 30 19:40:48 2015 +0200
Committer: Andrii Tkach <at...@hortonworks.com>
Committed: Tue Dec 1 12:50:42 2015 +0200

----------------------------------------------------------------------
 ambari-web/app/app.js                           |  6 ++
 .../configs/stack_config_properties_mapper.js   |  2 +-
 ambari-web/app/utils/config.js                  | 11 ++++
 ambari-web/test/utils/config_test.js            | 59 +++++++++++++++++++-
 4 files changed, 76 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6c945804/ambari-web/app/app.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/app.js b/ambari-web/app/app.js
index f2548f8..0932964 100644
--- a/ambari-web/app/app.js
+++ b/ambari-web/app/app.js
@@ -38,6 +38,12 @@ module.exports = Em.Application.create({
   isPermissionDataLoaded: false,
 
   /**
+   * @type {boolean}
+   * @default false
+   */
+  isKerberosEnabled: false,
+
+  /**
    * state of stack upgrade process
    * states:
    *  - INIT

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c945804/ambari-web/app/mappers/configs/stack_config_properties_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/configs/stack_config_properties_mapper.js b/ambari-web/app/mappers/configs/stack_config_properties_mapper.js
index 750a7af..3cc08a4 100644
--- a/ambari-web/app/mappers/configs/stack_config_properties_mapper.js
+++ b/ambari-web/app/mappers/configs/stack_config_properties_mapper.js
@@ -143,7 +143,7 @@ App.stackConfigPropertiesMapper = App.QuickDataMapper.create({
           var staticConfigInfo = this.parseIt(config, this.get('configToPlain'));
           var v = Em.isNone(staticConfigInfo.recommendedValue) ? staticConfigInfo.recommendedValue : staticConfigInfo.value;
           staticConfigInfo.value = staticConfigInfo.recommendedValue = App.config.formatPropertyValue(staticConfigInfo, v);
-          staticConfigInfo.isSecure = App.config.getIsSecure(staticConfigInfo.name);
+          staticConfigInfo.isSecureConfig = App.config.getIsSecure(staticConfigInfo.name);
           staticConfigInfo.isUserProperty = false;
           App.configsCollection.add(staticConfigInfo);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c945804/ambari-web/app/utils/config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/config.js b/ambari-web/app/utils/config.js
index c27ed68..f1240bc 100644
--- a/ambari-web/app/utils/config.js
+++ b/ambari-web/app/utils/config.js
@@ -235,6 +235,7 @@ App.config = Em.Object.create({
       for (var index in properties) {
         var advancedConfig = App.configsCollection.getConfigByName(index, siteConfig.type);
         var serviceConfigObj = advancedConfig || this.createDefaultConfig(index, serviceName, filename, false);
+        this.restrictSecureProperties(serviceConfigObj);
 
         if (serviceConfigObj.isRequiredByAgent !== false) {
           var formattedValue = this.formatPropertyValue(serviceConfigObj, properties[index]);
@@ -253,6 +254,16 @@ App.config = Em.Object.create({
   },
 
   /**
+   * put secure properties in read-only mode
+   * @param {object} config
+   */
+  restrictSecureProperties: function (config) {
+    var isReadOnly = config.isSecureConfig && App.get('isKerberosEnabled');
+    config.isReconfigurable = !isReadOnly;
+    config.isOverridable = !isReadOnly;
+  },
+
+  /**
    * This method sets default values for config property
    * These property values has the lowest priority and can be overridden be stack/UI
    * config property but is used when such properties are absent in stack/UI configs

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c945804/ambari-web/test/utils/config_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/utils/config_test.js b/ambari-web/test/utils/config_test.js
index 62a6689..ff770f0 100644
--- a/ambari-web/test/utils/config_test.js
+++ b/ambari-web/test/utils/config_test.js
@@ -1010,5 +1010,62 @@ describe('App.config', function () {
           "index": 0
         })
     });
-  })
+  });
+
+  describe("#restrictSecureProperties()", function() {
+    var testCases = [
+      {
+        input: {
+          isSecureConfig: true,
+          isKerberosEnabled: true
+        },
+        expected: {
+          isReconfigurable: false,
+          isOverridable: false
+        }
+      },
+      {
+        input: {
+          isSecureConfig: false,
+          isKerberosEnabled: true
+        },
+        expected: {
+          isReconfigurable: true,
+          isOverridable: true
+        }
+      },
+      {
+        input: {
+          isSecureConfig: true,
+          isKerberosEnabled: false
+        },
+        expected: {
+          isReconfigurable: true,
+          isOverridable: true
+        }
+      },
+      {
+        input: {
+          isSecureConfig: false,
+          isKerberosEnabled: false
+        },
+        expected: {
+          isReconfigurable: true,
+          isOverridable: true
+        }
+      }
+    ];
+
+    testCases.forEach(function(test) {
+      it("isSecureConfig = " + test.input.isSecureConfig + "; isKerberosEnabled = " + test.input.isKerberosEnabled, function() {
+        var config = {
+          isSecureConfig: test.input.isSecureConfig
+        };
+        App.set('isKerberosEnabled', test.input.isKerberosEnabled);
+        App.config.restrictSecureProperties(config);
+        expect(config.isReconfigurable).to.equal(test.expected.isReconfigurable);
+        expect(config.isOverridable).to.equal(test.expected.isOverridable);
+      });
+    });
+  });
 });


[34/50] ambari git commit: AMBARI-14053. PXF should get secured when security is enabled on cluster via kerberos wizard on ambari (Bhuvnesh Chaudhary via odiachenko).

Posted by nc...@apache.org.
AMBARI-14053. PXF should get secured when security is enabled on cluster via kerberos wizard on ambari (Bhuvnesh Chaudhary via odiachenko).


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ca53dfd1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ca53dfd1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ca53dfd1

Branch: refs/heads/branch-dev-patch-upgrade
Commit: ca53dfd106099a37712a04e5f6dc65329baca9d1
Parents: 1f1c661
Author: Oleksandr Diachenko <od...@pivotal.io>
Authored: Mon Nov 30 16:30:04 2015 -0800
Committer: Oleksandr Diachenko <od...@pivotal.io>
Committed: Mon Nov 30 16:30:04 2015 -0800

----------------------------------------------------------------------
 .../common-services/PXF/3.0.0/kerberos.json     | 35 ++++++++++++++++++++
 .../PXF/3.0.0/package/scripts/params.py         |  4 +++
 .../PXF/3.0.0/package/scripts/pxf.py            | 10 +++++-
 3 files changed, 48 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ca53dfd1/ambari-server/src/main/resources/common-services/PXF/3.0.0/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0/kerberos.json b/ambari-server/src/main/resources/common-services/PXF/3.0.0/kerberos.json
new file mode 100644
index 0000000..0a3c3c7
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0/kerberos.json
@@ -0,0 +1,35 @@
+{
+    "services": [
+        {
+            "components": [
+                {
+                    "identities": [
+                        {
+                            "keytab": {
+                                "configuration": null,
+                                "file": "${keytab_dir}/pxf.service.keytab",
+                                "group": {
+                                    "access": "",
+                                    "name": "${cluster-env/user_group}"
+                                },
+                                "owner": {
+                                    "access": "r",
+                                    "name": "pxf"
+                                }
+                            },
+                            "name": "pxf_client_pxf",
+                            "principal": {
+                                "configuration": null,
+                                "local_username": null,
+                                "type": "service",
+                                "value": "pxf/_HOST@${realm}"
+                            }
+                        }
+                    ],
+                    "name": "PXF"
+                }
+            ],
+            "name": "PXF"
+        }
+    ]
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/ca53dfd1/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
index a4986c9..1d77787 100644
--- a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
@@ -40,3 +40,7 @@ java_home = config["hostLevelParams"]["java_home"] if "java_home" in config["hos
 
 # Timeouts
 default_exec_timeout = 600
+
+# security related
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+realm_name = config['configurations']['kerberos-env']['realm']

http://git-wip-us.apache.org/repos/asf/ambari/blob/ca53dfd1/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf.py b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf.py
index dd0031c..08475fd 100644
--- a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf.py
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf.py
@@ -21,6 +21,7 @@ limitations under the License.
 from resource_management import Script
 
 from resource_management.libraries.resources.xml_config import XmlConfig
+from resource_management.libraries.script.config_dictionary import ConfigDictionary
 from resource_management.core.resources.accounts import User
 from resource_management.core.resources.system import Directory, File, Execute
 from resource_management.core.source import Template
@@ -99,9 +100,16 @@ class Pxf(Script):
       shutil.copy2("{0}/pxf-privatehdp.classpath".format(params.pxf_conf_dir),
                    "{0}/pxf-private.classpath".format(params.pxf_conf_dir))
 
+    if params.security_enabled:
+      pxf_site_dict = dict(params.config['configurations']['pxf-site'])
+      pxf_site_dict['pxf.service.kerberos.principal'] = "{0}/_HOST@{1}".format(params.pxf_user, params.realm_name)
+      pxf_site = ConfigDictionary(pxf_site_dict)
+    else:
+      pxf_site = params.config['configurations']['pxf-site']
+
     XmlConfig("pxf-site.xml",
               conf_dir=params.pxf_conf_dir,
-              configurations=params.config['configurations']['pxf-site'],
+              configurations=pxf_site,
               configuration_attributes=params.config['configuration_attributes']['pxf-site'])
 
 


[09/50] ambari git commit: AMBARI-14099. Tez config relationship dependency not taking effect (aonishuk)

Posted by nc...@apache.org.
AMBARI-14099. Tez config relationship dependency not taking effect  (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/01f7d709
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/01f7d709
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/01f7d709

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 01f7d709e8f49b3d2095ebb4de6c269e4aad1230
Parents: ebdda3b
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Fri Nov 27 16:51:28 2015 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Fri Nov 27 16:51:28 2015 +0200

----------------------------------------------------------------------
 .../HDP/2.2/services/TEZ/configuration/tez-site.xml     | 12 ++++++++++++
 .../resources/stacks/HDP/2.2/services/stack_advisor.py  |  3 ++-
 .../resources/stacks/HDP/2.3/services/stack_advisor.py  |  3 +--
 3 files changed, 15 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/01f7d709/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/configuration/tez-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/configuration/tez-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/configuration/tez-site.xml
index 33920d6..3a04f70 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/configuration/tez-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/configuration/tez-site.xml
@@ -304,12 +304,24 @@
     <name>tez.runtime.io.sort.mb</name>
     <value>512</value>
     <description>The size of the sort buffer when output needs to be sorted</description>
+    <depends-on>
+      <property>
+        <type>tez-site</type>
+        <name>tez.task.resource.memory.mb</name>
+      </property>
+    </depends-on>
   </property>
 
   <property>
     <name>tez.runtime.unordered.output.buffer.size-mb</name>
     <value>100</value>
     <description>The size of the buffer when output does not require to be sorted</description>
+    <depends-on>
+      <property>
+        <type>tez-site</type>
+        <name>tez.task.resource.memory.mb</name>
+      </property>
+    </depends-on>
   </property>
 
   <property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/01f7d709/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
index c9b3aa3..a33be1e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
@@ -744,12 +744,13 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
     #properties below should be always present as they are provided in HDP206 stack advisor
     yarnMaxAllocationSize = min(30 * int(configurations["yarn-site"]["properties"]["yarn.scheduler.minimum-allocation-mb"]), int(configurations["yarn-site"]["properties"]["yarn.scheduler.maximum-allocation-mb"]))
 
-    putTezProperty = self.putProperty(configurations, "tez-site")
+    putTezProperty = self.putProperty(configurations, "tez-site", services)
     putTezProperty("tez.am.resource.memory.mb", min(int(configurations["yarn-site"]["properties"]["yarn.scheduler.maximum-allocation-mb"]), int(clusterData['amMemory']) * 2 if int(clusterData['amMemory']) < 3072 else int(clusterData['amMemory'])))
 
     taskResourceMemory = clusterData['mapMemory'] if clusterData['mapMemory'] > 2048 else int(clusterData['reduceMemory'])
     taskResourceMemory = min(clusterData['containers'] * clusterData['ramPerContainer'], taskResourceMemory, yarnMaxAllocationSize)
     putTezProperty("tez.task.resource.memory.mb", min(int(configurations["yarn-site"]["properties"]["yarn.scheduler.maximum-allocation-mb"]), taskResourceMemory))
+    taskResourceMemory = int(configurations["tez-site"]["properties"]["tez.task.resource.memory.mb"])
     putTezProperty("tez.runtime.io.sort.mb", min(int(taskResourceMemory * 0.4), 2047))
     putTezProperty("tez.runtime.unordered.output.buffer.size-mb", int(taskResourceMemory * 0.075))
     putTezProperty("tez.session.am.dag.submit.timeout.secs", "600")

http://git-wip-us.apache.org/repos/asf/ambari/blob/01f7d709/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
index 419fb3a..a5e419a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
@@ -128,8 +128,7 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
     putTezProperty = self.putProperty(configurations, "tez-site")
     # remove 2gb limit for tez.runtime.io.sort.mb
     # in HDP 2.3 "tez.runtime.sorter.class" is set by default to PIPELINED, in other case comment calculation code below
-    taskResourceMemory = clusterData['mapMemory'] if clusterData['mapMemory'] > 2048 else int(clusterData['reduceMemory'])
-    taskResourceMemory = min(clusterData['containers'] * clusterData['ramPerContainer'], taskResourceMemory)
+    taskResourceMemory = int(configurations["tez-site"]["properties"]["tez.task.resource.memory.mb"])
     putTezProperty("tez.runtime.io.sort.mb", int(taskResourceMemory * 0.4))
 
     if "tez-site" in services["configurations"] and "tez.runtime.sorter.class" in services["configurations"]["tez-site"]["properties"]:


[50/50] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6ae63efd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6ae63efd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6ae63efd

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 6ae63efd6fdb52aa42795b78a4105e82a2ab96f4
Parents: 47a1bcf 108fc44
Author: Nate Cole <nc...@hortonworks.com>
Authored: Tue Dec 1 13:19:24 2015 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Tue Dec 1 13:19:24 2015 -0500

----------------------------------------------------------------------
 .../main/resources/ui/admin-web/app/index.html  |     1 +
 .../clusters/ClustersManageAccessCtrl.js        |     6 +-
 .../controllers/clusters/UserAccessListCtrl.js  |   131 +
 .../controllers/groups/GroupsEditCtrl.js        |     4 +-
 .../scripts/controllers/users/UsersShowCtrl.js  |     4 +-
 .../ui/admin-web/app/scripts/routes.js          |     5 +
 .../admin-web/app/scripts/services/Cluster.js   |    32 +-
 .../ui/admin-web/app/scripts/services/User.js   |     4 +-
 .../resources/ui/admin-web/app/styles/main.css  |    18 +
 .../app/views/clusters/manageAccess.html        |    14 +-
 .../app/views/clusters/userAccessList.html      |    83 +
 .../ui/admin-web/app/views/groups/edit.html     |    14 +-
 .../ui/admin-web/app/views/leftNavbar.html      |     2 +-
 .../ui/admin-web/app/views/users/show.html      |    16 +-
 ambari-agent/conf/unix/ambari-agent             |    14 +-
 ambari-agent/conf/unix/ambari-env.sh            |     3 +
 .../ambari_agent/AlertSchedulerHandler.py       |     8 +-
 .../src/main/python/ambari_agent/Controller.py  |    11 +-
 .../src/main/python/ambari_agent/FileCache.py   |     3 +
 .../main/python/ambari_agent/RecoveryManager.py |    76 +-
 .../ambari_agent/alerts/recovery_alert.py       |   103 +
 .../python/ambari_agent/alerts/script_alert.py  |    32 +-
 .../src/main/python/ambari_agent/main.py        |     2 +-
 .../test/python/ambari_agent/TestActionQueue.py |     6 +-
 .../ambari_agent/TestAlertSchedulerHandler.py   |    22 +-
 .../src/test/python/ambari_agent/TestAlerts.py  |   109 +-
 .../test/python/ambari_agent/TestHeartbeat.py   |     7 +-
 .../src/test/python/ambari_agent/TestMain.py    |     2 +-
 .../python/ambari_agent/TestRecoveryManager.py  |    32 +-
 .../resource_management/TestPackageResource.py  |    10 +-
 .../core/providers/package/__init__.py          |    35 +
 .../core/providers/package/apt.py               |    18 +-
 .../core/providers/package/zypper.py            |    34 +-
 .../core/resources/packaging.py                 |     7 +
 .../libraries/functions/conf_select.py          |    25 +-
 .../libraries/functions/hive_check.py           |     2 +-
 .../libraries/functions/tar_archive.py          |    13 +-
 .../libraries/script/script.py                  |    12 +-
 ambari-common/src/main/unix/ambari-python-wrap  |     3 -
 ambari-metrics/ambari-metrics-common/pom.xml    |    20 +-
 .../timeline/AbstractTimelineMetricsSink.java   |    38 +-
 .../metrics2/sink/timeline/Precision.java       |    16 +
 .../PrecisionLimitExceededException.java        |    36 +
 .../cache/HandleConnectExceptionTest.java       |    48 +-
 .../flume/FlumeTimelineMetricsSinkTest.java     |     5 +-
 .../ambari-metrics-hadoop-sink/pom.xml          |    16 +-
 .../timeline/HadoopTimelineMetricsSink.java     |     4 -
 .../timeline/HadoopTimelineMetricsSinkTest.java |    19 +-
 .../ambari-metrics-kafka-sink/pom.xml           |     2 +-
 .../kafka/KafkaTimelineMetricsReporter.java     |     6 -
 .../kafka/KafkaTimelineMetricsReporterTest.java |    14 +-
 .../ambari-metrics-storm-sink/pom.xml           |     5 -
 .../sink/storm/StormTimelineMetricsSink.java    |     3 -
 .../storm/StormTimelineMetricsSinkTest.java     |    15 +-
 .../conf/unix/ambari-metrics-collector          |    24 +-
 .../ambari-metrics-timelineservice/pom.xml      |     6 +-
 .../metrics/timeline/PhoenixHBaseAccessor.java  |     8 +-
 .../timeline/query/PhoenixTransactSQL.java      |     5 +-
 .../webapp/TimelineWebServices.java             |     3 +
 ambari-metrics/pom.xml                          |     8 +-
 ambari-project/pom.xml                          |     2 +-
 ambari-server/conf/unix/ambari-env.sh           |     4 +
 ambari-server/conf/unix/ambari.properties       |     5 +-
 ambari-server/conf/windows/ambari.properties    |     3 +-
 ambari-server/pom.xml                           |     2 +-
 ambari-server/sbin/ambari-server                |    17 +-
 .../server/actionmanager/ActionDBAccessor.java  |     6 -
 .../actionmanager/ActionDBAccessorImpl.java     |    34 +-
 .../server/actionmanager/ActionManager.java     |    11 +-
 .../server/actionmanager/HostRoleCommand.java   |     8 +-
 .../ambari/server/agent/HeartBeatHandler.java   |    45 +-
 .../server/api/handlers/CreateHandler.java      |     7 +-
 .../server/api/handlers/DeleteHandler.java      |     3 +
 .../server/api/handlers/QueryCreateHandler.java |     3 +
 .../ambari/server/api/handlers/ReadHandler.java |     3 +
 .../server/api/handlers/UpdateHandler.java      |     3 +
 .../ambari/server/api/query/QueryImpl.java      |     4 +
 .../api/services/RoleAuthorizationService.java  |     6 +
 .../api/services/UserAuthorizationService.java  |     7 +
 .../ambari/server/api/services/UserService.java |     1 +
 .../StackAdvisorBlueprintProcessor.java         |    14 +-
 .../server/configuration/Configuration.java     |    14 +-
 .../controller/AmbariManagementController.java  |    29 +-
 .../AmbariManagementControllerImpl.java         |   239 +-
 .../ambari/server/controller/AmbariServer.java  |   209 +-
 .../server/controller/KerberosHelperImpl.java   |     6 +-
 .../server/controller/TaskStatusRequest.java    |    48 -
 .../server/controller/TaskStatusResponse.java   |   140 -
 .../AbstractAuthorizedResourceProvider.java     |   524 +
 .../AbstractControllerResourceProvider.java     |     2 +-
 .../internal/AbstractResourceProvider.java      |     5 +-
 .../ActiveWidgetLayoutResourceProvider.java     |    43 +-
 .../AmbariPrivilegeResourceProvider.java        |    54 +-
 .../BlueprintConfigurationProcessor.java        |     2 +-
 .../controller/internal/CalculatedStatus.java   |     6 +
 .../ClusterPrivilegeResourceProvider.java       |    31 +-
 .../internal/ConfigGroupResourceProvider.java   |     2 +-
 .../internal/PrivilegeResourceProvider.java     |    13 +-
 .../internal/RequestResourceProvider.java       |     8 +-
 .../RoleAuthorizationResourceProvider.java      |   160 +-
 .../internal/StackAdvisorResourceProvider.java  |     8 +-
 .../internal/StageResourceProvider.java         |     8 +-
 .../internal/TaskResourceProvider.java          |   201 +-
 .../UserAuthorizationResourceProvider.java      |   176 +-
 .../internal/UserPrivilegeResourceProvider.java |   115 +-
 .../internal/UserResourceProvider.java          |    14 +-
 .../internal/ViewPrivilegeResourceProvider.java |     5 +-
 .../metrics/MetricsPaddingMethod.java           |     5 +
 .../metrics/timeline/AMSPropertyProvider.java   |     5 +-
 .../timeline/AMSReportPropertyProvider.java     |     5 +-
 .../metrics/timeline/MetricsRequestHelper.java  |    91 +-
 .../cache/TimelineMetricCacheEntryFactory.java  |     9 +-
 .../listeners/alerts/AlertReceivedListener.java |     2 +-
 .../ambari/server/orm/dao/ClusterDAO.java       |    47 +-
 .../server/orm/dao/ClusterVersionDAO.java       |    17 +
 .../apache/ambari/server/orm/dao/CrudDAO.java   |     2 +-
 .../server/orm/dao/HostRoleCommandDAO.java      |    86 +
 .../ambari/server/orm/dao/HostVersionDAO.java   |    17 +
 .../server/orm/dao/RepositoryVersionDAO.java    |    12 +
 .../server/orm/dao/RoleAuthorizationDAO.java    |    67 +
 .../apache/ambari/server/orm/dao/UserDAO.java   |     6 +-
 .../orm/entities/ClusterConfigEntity.java       |     5 +-
 .../server/orm/entities/ClusterEntity.java      |     2 +-
 .../ambari/server/orm/entities/HostEntity.java  |    13 +-
 .../ambari/server/orm/entities/HostEntity_.java |    44 +
 .../orm/entities/HostRoleCommandEntity_.java    |    68 +-
 .../server/orm/entities/HostVersionEntity.java  |     1 -
 .../server/orm/entities/PermissionEntity.java   |    44 +-
 .../orm/entities/RepositoryVersionEntity.java   |    23 +-
 .../server/orm/entities/ResourceTypeEntity.java |    14 -
 .../orm/entities/RoleAuthorizationEntity.java   |   114 +
 .../server/orm/entities/UpgradeEntity.java      |     2 +-
 .../AmbariAuthorizationFilter.java              |   144 +-
 .../AmbariLocalUserDetailsService.java          |     4 +-
 .../authorization/AuthorizationException.java   |    53 +
 .../authorization/AuthorizationHelper.java      |   192 +-
 .../authorization/LdapServerProperties.java     |     6 +-
 .../security/authorization/ResourceType.java    |    82 +
 .../authorization/RoleAuthorization.java        |   118 +
 .../internal/InternalAuthenticationToken.java   |    32 +-
 .../security/ldap/AmbariLdapDataPopulator.java  |    99 +-
 .../upgrades/FinalizeUpgradeAction.java         |     3 +-
 .../upgrades/UpdateDesiredStackAction.java      |    14 +-
 .../ambari/server/stack/MasterHostResolver.java |     4 +-
 .../ambari/server/state/PropertyInfo.java       |    11 +
 .../server/state/ServiceComponentHost.java      |     8 +
 .../server/state/ServiceComponentImpl.java      |     2 +-
 .../apache/ambari/server/state/ServiceImpl.java |     2 +-
 .../state/alert/AlertDefinitionFactory.java     |     4 +
 .../server/state/alert/RecoverySource.java      |    32 +
 .../ambari/server/state/alert/SourceType.java   |     5 +
 .../server/state/cluster/ClusterImpl.java       |    79 +-
 .../server/state/cluster/ClustersImpl.java      |    76 +-
 .../state/configgroup/ConfigGroupImpl.java      |     3 +-
 .../state/scheduler/RequestExecutionImpl.java   |     6 +-
 .../ambari/server/state/stack/UpgradePack.java  |    11 +-
 .../server/state/stack/upgrade/UpgradeType.java |     5 +
 .../svccomphost/ServiceComponentHostImpl.java   |    77 +-
 .../server/topology/AsyncCallableService.java   |   124 +
 .../ambari/server/topology/TopologyManager.java |    72 +-
 .../server/upgrade/AbstractUpgradeCatalog.java  |   159 +-
 .../server/upgrade/StackUpgradeHelper.java      |     2 +-
 .../server/upgrade/UpgradeCatalog150.java       |     1 +
 .../server/upgrade/UpgradeCatalog170.java       |     4 +-
 .../server/upgrade/UpgradeCatalog200.java       |    29 +
 .../server/upgrade/UpgradeCatalog210.java       |    77 +-
 .../server/upgrade/UpgradeCatalog213.java       |   302 +-
 .../server/upgrade/UpgradeCatalog220.java       |   224 +-
 .../apache/ambari/server/view/ViewRegistry.java |     7 +-
 ambari-server/src/main/python/ambari-server.py  |    15 +-
 .../python/ambari_server/dbConfiguration.py     |     8 +-
 .../ambari_server/dbConfiguration_linux.py      |    22 +
 .../main/python/ambari_server/serverUpgrade.py  |     8 +
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   234 +-
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |   234 +-
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |   234 +-
 .../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql     |   236 +-
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |   236 +-
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   |   250 +-
 .../src/main/resources/META-INF/persistence.xml |     1 +
 .../1.6.1.2.2.0/configuration/accumulo-env.xml  |    13 +-
 .../configuration/accumulo-log4j.xml            |     2 +-
 .../AMBARI_METRICS/0.1.0/alerts.json            |    24 +
 .../0.1.0/configuration/ams-env.xml             |    16 +-
 .../0.1.0/configuration/ams-hbase-env.xml       |    36 +-
 .../0.1.0/configuration/ams-hbase-log4j.xml     |     2 +-
 .../0.1.0/configuration/ams-hbase-site.xml      |     6 +
 .../0.1.0/configuration/ams-log4j.xml           |     2 +-
 .../0.1.0/configuration/ams-site.xml            |     7 +-
 .../0.1.0/package/scripts/ams_service.py        |     8 +-
 .../0.1.0/package/scripts/functions.py          |    19 +-
 .../0.1.0/package/scripts/metrics_collector.py  |     2 +
 .../0.1.0/package/scripts/params.py             |    26 +-
 .../ATLAS/0.1.0.2.3/configuration/atlas-env.xml |     2 +-
 .../0.5.0.2.1/configuration/falcon-env.xml      |     2 +-
 .../1.4.0.2.0/configuration/flume-conf.xml      |     2 +-
 .../FLUME/1.4.0.2.0/configuration/flume-env.xml |     2 +-
 .../GANGLIA/3.5.0/configuration/ganglia-env.xml |     2 +-
 .../HAWQ/2.0.0/configuration/gpcheck-env.xml    |     2 +-
 .../2.0.0/configuration/hawq-limits-env.xml     |     2 +-
 .../HAWQ/2.0.0/configuration/hawq-site.xml      |    24 +
 .../common-services/HAWQ/2.0.0/metainfo.xml     |     2 +-
 .../0.96.0.2.0/configuration/hbase-env.xml      |    15 +-
 .../0.96.0.2.0/configuration/hbase-log4j.xml    |     2 +-
 .../0.96.0.2.0/package/scripts/params_linux.py  |     7 +-
 .../package/scripts/setup_ranger_hbase.py       |    27 +
 .../HDFS/2.1.0.2.0/configuration/hadoop-env.xml |    22 +-
 .../HDFS/2.1.0.2.0/configuration/hdfs-log4j.xml |     2 +-
 .../package/alerts/alert_checkpoint_time.py     |     7 +-
 .../package/alerts/alert_ha_namenode_health.py  |     5 +-
 .../package/alerts/alert_upgrade_finalized.py   |     8 +-
 .../2.1.0.2.0/package/scripts/hdfs_namenode.py  |    40 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |     7 +-
 .../package/scripts/setup_ranger_hdfs.py        |    29 +-
 .../HIVE/0.12.0.2.0/configuration/hcat-env.xml  |     2 +-
 .../HIVE/0.12.0.2.0/configuration/hive-env.xml  |     3 +-
 .../configuration/hive-exec-log4j.xml           |     2 +-
 .../0.12.0.2.0/configuration/hive-log4j.xml     |     2 +-
 .../HIVE/0.12.0.2.0/configuration/hive-site.xml |     2 +
 .../0.12.0.2.0/configuration/webhcat-env.xml    |     2 +-
 .../0.12.0.2.0/configuration/webhcat-log4j.xml  |     2 +-
 .../package/alerts/alert_hive_metastore.py      |    20 +-
 .../package/alerts/alert_hive_thrift_port.py    |    22 +-
 .../package/alerts/alert_webhcat_server.py      |    23 +-
 .../package/scripts/hive_metastore.py           |    19 +-
 .../0.12.0.2.0/package/scripts/params_linux.py  |    12 +-
 .../package/scripts/setup_ranger_hive.py        |    19 +
 .../KAFKA/0.8.1.2.2/configuration/kafka-env.xml |     2 +-
 .../0.8.1.2.2/configuration/kafka-log4j.xml     |     2 +-
 .../KAFKA/0.8.1.2.2/metainfo.xml                |     2 +
 .../KAFKA/0.8.1.2.2/package/scripts/kafka.py    |     4 +-
 .../KAFKA/0.8.1.2.2/package/scripts/params.py   |    49 +-
 .../package/scripts/setup_ranger_kafka.py       |    20 +
 .../KAFKA/0.8.1.2.2/package/scripts/upgrade.py  |    12 +-
 .../1.10.3-10/configuration/kerberos-env.xml    |     2 +-
 .../0.5.0.2.2/configuration/gateway-log4j.xml   |     2 +-
 .../KNOX/0.5.0.2.2/configuration/knox-env.xml   |    13 +-
 .../KNOX/0.5.0.2.2/configuration/ldap-log4j.xml |     2 +-
 .../KNOX/0.5.0.2.2/package/scripts/knox.py      |    20 +-
 .../0.5.0.2.2/package/scripts/params_linux.py   |    34 +-
 .../package/scripts/setup_ranger_knox.py        |    20 +
 .../1.0.0.2.3/configuration/mahout-env.xml      |     2 +-
 .../1.0.0.2.3/configuration/mahout-log4j.xml    |     2 +-
 .../OOZIE/4.0.0.2.0/configuration/oozie-env.xml |     2 +-
 .../4.0.0.2.0/configuration/oozie-log4j.xml     |     2 +-
 .../4.0.0.2.0/configuration/oozie-site.xml      |     2 +
 .../OOZIE/4.2.0.2.3/configuration/oozie-env.xml |     2 +-
 .../4.2.0.2.3/configuration/oozie-site.xml      |     2 +
 .../PIG/0.12.0.2.0/configuration/pig-env.xml    |     2 +-
 .../PIG/0.12.0.2.0/configuration/pig-log4j.xml  |     2 +-
 .../0.12.0.2.0/configuration/pig-properties.xml |     2 +-
 .../common-services/PXF/3.0.0/kerberos.json     |    35 +
 .../common-services/PXF/3.0.0/metainfo.xml      |     2 +-
 .../PXF/3.0.0/package/scripts/params.py         |     4 +
 .../PXF/3.0.0/package/scripts/pxf.py            |    10 +-
 .../0.4.0/configuration/admin-properties.xml    |     4 +-
 .../RANGER/0.4.0/configuration/ranger-env.xml   |     2 +-
 .../RANGER/0.4.0/package/scripts/params.py      |     9 +-
 .../0.5.0.2.3/configuration/kms-env.xml         |     2 +-
 .../0.5.0.2.3/configuration/kms-log4j.xml       |     2 +-
 .../0.5.0.2.3/package/scripts/params.py         |    16 +-
 .../0.60.0.2.2/configuration/slider-env.xml     |     2 +-
 .../0.60.0.2.2/configuration/slider-log4j.xml   |     2 +-
 .../SPARK/1.2.0.2.2/configuration/spark-env.xml |     8 +-
 .../configuration/spark-log4j-properties.xml    |     2 +-
 .../SPARK/1.2.0.2.2/package/scripts/params.py   |     7 +
 .../1.2.0.2.2/package/scripts/setup_spark.py    |     6 +-
 .../1.2.0.2.2/package/scripts/spark_service.py  |    10 +-
 .../1.2.0.2.2/package/scripts/status_params.py  |     7 +-
 .../SPARK/1.4.1.2.3/kerberos.json               |     3 +
 .../SPARK/1.4.1.2.3/metainfo.xml                |    11 +-
 .../SQOOP/1.4.4.2.0/configuration/sqoop-env.xml |     2 +-
 .../STORM/0.9.1.2.1/configuration/storm-env.xml |    38 +-
 .../STORM/0.9.1.2.1/metainfo.xml                |     2 +
 .../0.9.1.2.1/package/scripts/params_linux.py   |    42 +-
 .../STORM/0.9.1.2.1/package/scripts/service.py  |     4 +
 .../package/scripts/setup_ranger_storm.py       |    20 +
 .../TEZ/0.4.0.2.1/configuration/tez-env.xml     |     2 +-
 .../configuration-mapred/mapred-env.xml         |     2 +-
 .../YARN/2.1.0.2.0/configuration/yarn-env.xml   |     2 +-
 .../YARN/2.1.0.2.0/configuration/yarn-log4j.xml |     2 +-
 .../package/alerts/alert_nodemanager_health.py  |    15 +-
 .../alerts/alert_nodemanagers_summary.py        |    13 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |     7 +-
 .../YARN/2.1.0.2.0/package/scripts/service.py   |    19 +-
 .../package/scripts/setup_ranger_yarn.py        |    19 +
 .../YARN/2.1.0.2.0/package/scripts/yarn.py      |     1 +
 .../3.4.5.2.0/configuration/zookeeper-env.xml   |    12 +-
 .../3.4.5.2.0/configuration/zookeeper-log4j.xml |     2 +-
 .../custom_actions/scripts/check_host.py        |    13 +-
 .../services/FLUME/configuration/flume-conf.xml |     2 +-
 .../services/FLUME/configuration/flume-env.xml  |     2 +-
 .../FLUME/configuration/flume-log4j.xml         |     2 +-
 .../GANGLIA/configuration/ganglia-env.xml       |     2 +-
 .../services/HBASE/configuration/hbase-env.xml  |     8 +-
 .../HBASE/configuration/hbase-log4j.xml         |     2 +-
 .../services/HBASE/configuration/hbase-site.xml |     3 +
 .../services/HDFS/configuration/hadoop-env.xml  |     2 +-
 .../services/HDFS/configuration/hdfs-log4j.xml  |     2 +-
 .../services/HIVE/configuration/hcat-env.xml    |     2 +-
 .../services/HIVE/configuration/hive-env.xml    |     5 +-
 .../HIVE/configuration/hive-exec-log4j.xml      |     2 +-
 .../services/HIVE/configuration/hive-log4j.xml  |     2 +-
 .../services/HIVE/configuration/hive-site.xml   |     3 +
 .../services/HIVE/configuration/webhcat-env.xml |     2 +-
 .../services/OOZIE/configuration/oozie-env.xml  |     2 +-
 .../OOZIE/configuration/oozie-log4j.xml         |     2 +-
 .../services/OOZIE/configuration/oozie-site.xml |     3 +
 .../0.8/services/PIG/configuration/pig-env.xml  |     2 +-
 .../services/PIG/configuration/pig-log4j.xml    |     2 +-
 .../PIG/configuration/pig-properties.xml        |     2 +-
 .../YARN/configuration-mapred/mapred-env.xml    |     2 +-
 .../services/YARN/configuration/yarn-env.xml    |     2 +-
 .../services/YARN/configuration/yarn-log4j.xml  |     2 +-
 .../ZOOKEEPER/configuration/zookeeper-env.xml   |     2 +-
 .../ZOOKEEPER/configuration/zookeeper-log4j.xml |     2 +-
 .../services/HBASE/configuration/hbase-site.xml |     3 +
 .../PIG/configuration/pig-properties.xml        |     2 +-
 .../before-START/files/fast-hdfs-resource.jar   |   Bin 19285282 -> 19285353 bytes
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |   148 +-
 .../FALCON/configuration/falcon-env.xml         |     2 +-
 .../GLUSTERFS/configuration/hadoop-env.xml      |     2 +-
 .../services/HBASE/configuration/hbase-site.xml |     3 +
 .../PIG/configuration/pig-properties.xml        |     2 +-
 .../services/STORM/configuration/storm-env.xml  |     2 +-
 .../services/TEZ/configuration/tez-env.xml      |     2 +-
 .../services/YARN/configuration/yarn-env.xml    |     2 +-
 .../services/HIVE/configuration/hive-site.xml   |     1 +
 .../services/OOZIE/configuration/oozie-site.xml |     1 +
 .../PIG/configuration/pig-properties.xml        |     2 +-
 .../services/YARN/configuration/yarn-env.xml    |     2 +-
 .../HDP/2.1/upgrades/nonrolling-upgrade-2.3.xml |    53 +-
 .../services/HBASE/configuration/hbase-env.xml  |     4 +-
 .../services/HDFS/configuration/hadoop-env.xml  |    12 +-
 .../services/HDFS/configuration/hdfs-log4j.xml  |     2 +-
 .../services/HIVE/configuration/hive-env.xml    |     2 +-
 .../services/HIVE/configuration/hive-site.xml   |    14 +
 .../services/OOZIE/configuration/oozie-env.xml  |     2 +-
 .../services/STORM/configuration/storm-env.xml  |     2 +-
 .../2.2/services/TEZ/configuration/tez-site.xml |    12 +
 .../YARN/configuration-mapred/mapred-env.xml    |     2 +-
 .../YARN/configuration/capacity-scheduler.xml   |     2 +-
 .../services/YARN/configuration/yarn-env.xml    |     2 +-
 .../stacks/HDP/2.2/services/YARN/kerberos.json  |     3 +-
 .../stacks/HDP/2.2/services/stack_advisor.py    |    76 +-
 .../HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml |     9 +-
 .../stacks/HDP/2.2/upgrades/upgrade-2.3.xml     |     3 +-
 .../services/ECS/configuration/hadoop-env.xml   |     2 +-
 .../services/HBASE/configuration/hbase-env.xml  |     2 +-
 .../ACCUMULO/configuration/accumulo-log4j.xml   |     2 +-
 .../GLUSTERFS/configuration/hadoop-env.xml      |     2 +-
 .../services/HBASE/configuration/hbase-site.xml |     3 +
 .../services/HIVE/configuration/hive-site.xml   |     3 +
 .../services/OOZIE/configuration/oozie-site.xml |     3 +
 .../YARN/configuration/capacity-scheduler.xml   |     2 +-
 .../stacks/HDP/2.3/role_command_order.json      |     8 +-
 .../ACCUMULO/configuration/accumulo-log4j.xml   |     2 +-
 .../FALCON/configuration/falcon-env.xml         |     2 +-
 .../services/HBASE/configuration/hbase-env.xml  |     2 +-
 .../services/HDFS/configuration/hadoop-env.xml  |    12 +-
 .../services/HIVE/configuration/hive-env.xml    |     2 +-
 .../services/KAFKA/configuration/kafka-env.xml  |     2 +-
 .../KNOX/configuration/knoxsso-topology.xml     |    94 +
 .../PIG/configuration/pig-properties.xml        |     2 +-
 .../RANGER/configuration/admin-properties.xml   |    23 -
 .../RANGER/configuration/ranger-admin-site.xml  |    58 +
 .../RANGER/configuration/ranger-env.xml         |     2 +-
 .../configuration/spark-hive-site-override.xml  |     4 +-
 .../STORM/configuration/storm-cluster-log4j.xml |     2 +-
 .../services/STORM/configuration/storm-env.xml  |     2 +-
 .../STORM/configuration/storm-worker-log4j.xml  |     2 +-
 .../services/YARN/configuration/yarn-env.xml    |     2 +-
 .../services/YARN/configuration/yarn-log4j.xml  |     2 +-
 .../services/YARN/configuration/yarn-site.xml   |     3 +
 .../stacks/HDP/2.3/services/stack_advisor.py    |    54 +-
 .../stacks/HDP/2.3/upgrades/config-upgrade.xml  |    18 +-
 .../KNOX/configuration/knoxsso-topology.xml     |    94 -
 .../AMBARI_METRICS/configuration/ams-env.xml    |    14 +-
 .../configuration/ams-hbase-env.xml             |     2 +-
 .../AMBARI_METRICS/configuration/ams-log4j.xml  |     2 +-
 .../FALCON/configuration/falcon-env.xml         |     2 +-
 .../services/FLUME/configuration/flume-conf.xml |     2 +-
 .../services/FLUME/configuration/flume-env.xml  |     2 +-
 .../services/HBASE/configuration/hbase-env.xml  |     2 +-
 .../services/HDFS/configuration/hadoop-env.xml  |     2 +-
 .../services/HIVE/configuration/hcat-env.xml    |     2 +-
 .../services/HIVE/configuration/hive-env.xml    |     2 +-
 .../services/HIVE/configuration/webhcat-env.xml |     2 +-
 .../services/OOZIE/configuration/oozie-env.xml  |     2 +-
 .../services/SQOOP/configuration/sqoop-env.xml  |     2 +-
 .../services/STORM/configuration/storm-env.xml  |     2 +-
 .../2.1/services/TEZ/configuration/tez-env.xml  |     2 +-
 .../YARN/configuration-mapred/mapred-env.xml    |     2 +-
 .../services/YARN/configuration/yarn-env.xml    |     2 +-
 .../ZOOKEEPER/configuration/zookeeper-env.xml   |     2 +-
 .../services/HBASE/configuration/hbase-env.xml  |     2 +-
 .../services/HDFS/configuration/hadoop-env.xml  |     2 +-
 .../services/HDFS/configuration/hdfs-log4j.xml  |     2 +-
 .../services/HIVE/configuration/hive-env.xml    |     2 +-
 .../services/HIVE/configuration/hive-site.xml   |     2 +
 .../services/KNOX/configuration/knox-env.xml    |     2 +-
 .../YARN/configuration/capacity-scheduler.xml   |     2 +-
 .../services/YARN/configuration/yarn-env.xml    |     2 +-
 .../services/OOZIE/configuration/oozie-env.xml  |     2 +-
 .../YARN/configuration/capacity-scheduler.xml   |     2 +-
 .../webapp/WEB-INF/spring-security.xml          |     8 +-
 .../ExecutionCommandWrapperTest.java            |     2 -
 .../server/agent/TestHeartbeatHandler.java      |     5 +-
 .../server/api/handlers/CreateHandlerTest.java  |    32 +
 .../server/api/handlers/DeleteHandlerTest.java  |    30 +
 .../api/handlers/QueryCreateHandlerTest.java    |   102 +
 .../server/api/handlers/ReadHandlerTest.java    |    35 +
 .../server/api/handlers/UpdateHandlerTest.java  |    33 +
 .../services/RoleAuthorizationServiceTest.java  |    86 +
 .../services/UserAuthorizationServiceTest.java  |    87 +
 .../services/ViewSubResourceServiceTest.java    |     9 +-
 .../serializers/JsonSerializerTest.java         |    26 +-
 .../StackAdvisorBlueprintProcessorTest.java     |     2 +
 .../AmbariManagementControllerImplTest.java     |   101 +-
 .../AmbariManagementControllerTest.java         |   232 +-
 .../server/controller/AmbariServerTest.java     |    32 +-
 .../controller/AuthToLocalBuilderTest.java      |    35 +-
 ...hYarnCapacitySchedulerReleaseConfigTest.java |     2 +-
 .../internal/AbstractResourceProviderTest.java  |    76 +-
 .../ActiveWidgetLayoutResourceProviderTest.java |   458 +-
 .../AlertTargetResourceProviderTest.java        |    11 +-
 .../AmbariPrivilegeResourceProviderTest.java    |   820 +-
 .../BlueprintConfigurationProcessorTest.java    |    33 +-
 .../ClusterPrivilegeResourceProviderTest.java   |   501 +-
 ...ClusterStackVersionResourceProviderTest.java |    13 +-
 ...leRepositoryVersionResourceProviderTest.java |     4 +-
 .../internal/RequestResourceProviderTest.java   |    78 +-
 .../RoleAuthorizationResourceProviderTest.java  |   202 +
 .../StackAdvisorResourceProviderTest.java       |    40 +
 .../StackDependencyResourceProviderTest.java    |     4 +-
 .../internal/TaskResourceProviderTest.java      |    54 +-
 .../UserAuthorizationResourceProviderTest.java  |   411 +
 .../UserPrivilegeResourceProviderTest.java      |   350 +-
 .../internal/UserResourceProviderTest.java      |   646 +-
 .../ViewPrivilegeResourceProviderTest.java      |     6 +
 .../internal/WidgetResourceProviderTest.java    |     4 +-
 .../ganglia/GangliaPropertyProviderTest.java    |    25 +-
 .../timeline/MetricsRequestHelperTest.java      |   118 +
 .../timeline/cache/TimelineMetricCacheTest.java |    13 +-
 .../api/AmbariHttpWebRequest.java               |   393 +
 .../api/ClusterConfigParams.java                |    84 +
 .../functionaltests/api/ConnectionParams.java   |    89 +
 .../server/functionaltests/api/WebRequest.java  |   192 +
 .../server/functionaltests/api/WebResponse.java |    57 +
 .../AddDesiredConfigurationWebRequest.java      |   108 +
 .../api/cluster/CreateClusterWebRequest.java    |    88 +
 .../cluster/CreateConfigurationWebRequest.java  |    87 +
 .../api/cluster/GetAllClustersWebRequest.java   |    53 +
 .../api/cluster/GetClusterWebRequest.java       |    49 +
 .../api/cluster/GetRequestStatusWebRequest.java |    78 +
 .../api/host/AddHostWebRequest.java             |    63 +
 .../api/host/GetHostWebRequest.java             |    56 +
 .../api/host/GetRegisteredHostWebRequest.java   |    59 +
 .../api/host/RegisterHostWebRequest.java        |    59 +
 .../api/service/AddServiceWebRequest.java       |    98 +
 .../api/service/DeleteServiceWebRequest.java    |    67 +
 .../api/service/GetServiceWebRequest.java       |    67 +
 .../api/service/InstallServiceWebRequest.java   |    39 +
 .../api/service/SetServiceStateWebRequest.java  |    97 +
 .../api/service/StartServiceWebRequest.java     |    38 +
 .../api/service/StopServiceWebRequest.java      |    38 +
 .../AddServiceComponentWebRequest.java          |    69 +
 .../GetServiceComponentWebRequest.java          |    69 +
 .../SetServiceComponentStateWebRequest.java     |    87 +
 .../AddServiceComponentHostWebRequest.java      |    69 +
 .../BulkAddServiceComponentHostsWebRequest.java |   127 +
 ...kSetServiceComponentHostStateWebRequest.java |    91 +
 .../GetServiceComponentHostWebRequest.java      |    69 +
 .../SetServiceComponentHostStateWebRequest.java |    89 +
 .../server/DeleteServiceTest.java               |   197 +
 .../server/LocalAmbariServer.java               |    31 +-
 .../functionaltests/server/ServerTestBase.java  |   146 +
 .../server/StartStopServerTest.java             |    71 +-
 .../functionaltests/utils/ClusterUtils.java     |   247 +
 .../functionaltests/utils/RestApiUtils.java     |    52 +
 .../server/metadata/RoleCommandOrderTest.java   |    37 +-
 .../apache/ambari/server/orm/OrmTestHelper.java |     9 +-
 .../apache/ambari/server/orm/TestOrmImpl.java   |    13 +-
 .../server/orm/dao/ConfigGroupDAOTest.java      |     7 +-
 .../ambari/server/orm/dao/CrudDAOTest.java      |     2 +-
 .../server/orm/dao/HostVersionDAOTest.java      |     5 +-
 .../ambari/server/orm/dao/RequestDAOTest.java   |     5 +-
 .../server/orm/dao/RequestScheduleDAOTest.java  |     7 +-
 .../server/orm/dao/ServiceConfigDAOTest.java    |    13 +-
 .../scheduler/ExecutionScheduleManagerTest.java |     2 +-
 .../security/TestAuthenticationFactory.java     |   164 +
 .../AmbariAuthorizationFilterTest.java          |    77 +-
 .../authorization/AuthorizationHelperTest.java  |   186 +-
 .../authorization/LdapServerPropertiesTest.java |     5 +-
 .../authorization/ResourceTypeTest.java         |    61 +
 .../authorization/RoleAuthorizationTest.java    |    35 +
 .../security/authorization/TestUsers.java       |     4 +-
 .../ldap/AmbariLdapDataPopulatorTest.java       |    66 +-
 .../upgrades/UpgradeActionTest.java             |    85 +-
 .../ambari/server/stack/StackManagerTest.java   |     2 +-
 .../ambari/server/state/ConfigGroupTest.java    |     2 +-
 .../ambari/server/state/ConfigHelperTest.java   |     2 +-
 .../ambari/server/state/PropertyInfoTest.java   |    42 +-
 .../server/state/RequestExecutionTest.java      |     2 +-
 .../ambari/server/state/UpgradeHelperTest.java  |    62 +-
 .../server/state/cluster/ClusterTest.java       |    40 +-
 .../topology/AsyncCallableServiceTest.java      |   166 +
 .../topology/BlueprintValidatorImplTest.java    |     3 +-
 .../topology/ConfigureClusterTaskTest.java      |   129 +
 .../server/topology/TopologyManagerTest.java    |    16 +-
 .../server/update/HostUpdateHelperTest.java     |    12 +-
 .../server/upgrade/UpgradeCatalog200Test.java   |    34 +-
 .../server/upgrade/UpgradeCatalog210Test.java   |     2 +-
 .../server/upgrade/UpgradeCatalog213Test.java   |   423 +-
 .../server/upgrade/UpgradeCatalog220Test.java   |   185 +-
 .../server/upgrade/UpgradeCatalogHelper.java    |     7 +-
 .../utils/CollectionPresentationUtils.java      |    73 +
 .../ambari/server/view/ViewRegistryTest.java    |    16 -
 .../view/persistence/DataStoreImplTest.java     |     2 +-
 .../InstanceValidationResultImplTest.java       |     3 +-
 .../src/test/python/TestAmbariServer.py         |    94 +-
 .../test/python/custom_actions/TestCheckHost.py |     2 +-
 .../AMBARI_METRICS/test_metrics_collector.py    |    30 +
 .../stacks/2.0.6/HBASE/test_hbase_client.py     |     2 +-
 .../stacks/2.0.6/HBASE/test_hbase_master.py     |     2 +-
 .../2.0.6/HBASE/test_hbase_regionserver.py      |     2 +-
 .../2.0.6/HBASE/test_phoenix_queryserver.py     |     2 +-
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |     2 +-
 .../stacks/2.0.6/HDFS/test_hdfs_client.py       |     2 +-
 .../stacks/2.0.6/HDFS/test_journalnode.py       |     2 +-
 .../python/stacks/2.0.6/HDFS/test_namenode.py   |   121 +-
 .../python/stacks/2.0.6/HDFS/test_nfsgateway.py |     2 +-
 .../stacks/2.0.6/HIVE/test_hive_client.py       |     2 +-
 .../stacks/2.0.6/HIVE/test_hive_server.py       |     2 +-
 .../2.0.6/HIVE/test_hive_service_check.py       |     6 +-
 .../stacks/2.0.6/HIVE/test_webhcat_server.py    |     2 +-
 .../stacks/2.0.6/OOZIE/test_oozie_client.py     |     2 +-
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     |     4 +-
 .../python/stacks/2.0.6/PIG/test_pig_client.py  |     2 +-
 .../python/stacks/2.0.6/SQOOP/test_sqoop.py     |     2 +-
 .../stacks/2.0.6/YARN/test_historyserver.py     |    34 +-
 .../stacks/2.0.6/YARN/test_mapreduce2_client.py |     4 +-
 .../stacks/2.0.6/YARN/test_nodemanager.py       |    34 +-
 .../stacks/2.0.6/YARN/test_resourcemanager.py   |    36 +-
 .../stacks/2.0.6/YARN/test_yarn_client.py       |     5 +-
 .../2.0.6/ZOOKEEPER/test_zookeeper_client.py    |     2 +-
 .../2.0.6/ZOOKEEPER/test_zookeeper_server.py    |     2 +-
 .../stacks/2.0.6/common/test_stack_advisor.py   |   139 +-
 .../stacks/2.1/FALCON/test_falcon_client.py     |     2 +-
 .../stacks/2.1/FALCON/test_falcon_server.py     |    14 +-
 .../stacks/2.1/HIVE/test_hive_metastore.py      |    76 +-
 .../stacks/2.1/STORM/test_storm_drpc_server.py  |     2 +-
 .../stacks/2.1/STORM/test_storm_nimbus.py       |     2 +-
 .../stacks/2.1/STORM/test_storm_nimbus_prod.py  |     2 +-
 .../stacks/2.1/STORM/test_storm_supervisor.py   |     2 +-
 .../2.1/STORM/test_storm_supervisor_prod.py     |     2 +-
 .../stacks/2.1/STORM/test_storm_ui_server.py    |     2 +-
 .../python/stacks/2.1/TEZ/test_tez_client.py    |     2 +-
 .../stacks/2.1/YARN/test_apptimelineserver.py   |    31 +-
 .../stacks/2.2/ACCUMULO/test_accumulo_client.py |     2 +-
 .../stacks/2.2/KAFKA/test_kafka_broker.py       |     2 +-
 .../python/stacks/2.2/KNOX/test_knox_gateway.py |    26 +-
 .../stacks/2.2/RANGER/test_ranger_admin.py      |     2 +-
 .../stacks/2.2/RANGER/test_ranger_usersync.py   |     2 +-
 .../stacks/2.2/SLIDER/test_slider_client.py     |     2 +-
 .../stacks/2.2/SPARK/test_job_history_server.py |    12 +-
 .../stacks/2.2/SPARK/test_spark_client.py       |    10 +-
 .../stacks/2.2/common/test_stack_advisor.py     |   169 +-
 .../stacks/2.3/MAHOUT/test_mahout_client.py     |     2 +-
 .../2.3/SPARK/test_spark_thrift_server.py       |    13 +-
 .../test/python/stacks/2.3/YARN/test_ats_1_5.py |     1 +
 .../stacks/2.3/common/services-hawq-1-host.json |     1 -
 .../2.3/common/services-hawq-3-hosts.json       |     1 -
 .../services-master_ambari_colo-3-hosts.json    |     1 -
 .../services-master_standby_colo-3-hosts.json   |     1 -
 .../2.3/common/services-nohawq-3-hosts.json     |     1 -
 .../common/services-normal-hawq-3-hosts.json    |     1 -
 .../common/services-normal-nohawq-3-hosts.json  |     1 -
 .../2.3/common/services-sparkts-hive.json       | 10043 +++++++++++++++++
 .../stacks/2.3/common/services-sparkts.json     |  5860 ++++++++++
 .../services-standby_ambari_colo-3-hosts.json   |     1 -
 .../python/stacks/2.3/common/sparkts-host.json  |   220 +
 .../stacks/2.3/common/test_stack_advisor.py     |   101 +-
 .../stacks/2.3/configs/spark_default.json       |     6 +-
 .../test_kerberos_descriptor_2_1_3.json         |  1316 +++
 .../services/FLUME/configuration/flume-conf.xml |     2 +-
 .../services/FLUME/configuration/flume-conf.xml |     2 +-
 .../stacks/HDP/2.1.1/role_command_order.json    |     2 +-
 .../PIG/configuration/pig-properties.xml        |     2 +-
 .../stacks/HDP/2.2.0/role_command_order.json    |     2 +-
 ambari-web/app/app.js                           |    24 +-
 ambari-web/app/assets/data/services/ambari.json |     1 -
 ambari-web/app/assets/test/tests.js             |     5 +-
 .../controllers/global/cluster_controller.js    |    18 +-
 .../global/user_settings_controller.js          |    65 +-
 .../global/wizard_watcher_controller.js         |     4 +-
 ambari-web/app/controllers/main.js              |     1 +
 .../nameNode/step3_controller.js                |     4 +-
 .../rangerAdmin/step1_controller.js             |     4 +-
 .../app/controllers/main/admin/kerberos.js      |    34 +-
 .../main/admin/kerberos/step2_controller.js     |    41 +-
 .../main/admin/kerberos/step4_controller.js     |     5 +-
 .../main/admin/kerberos/step5_controller.js     |     4 +-
 .../main/admin/kerberos/step8_controller.js     |     5 +-
 .../main/admin/kerberos/wizard_controller.js    |    58 +-
 .../main/admin/serviceAccounts_controller.js    |    28 +-
 .../main/admin/stack_and_upgrade_controller.js  |    42 +-
 .../add_alert_definition/step1_controller.js    |     7 +-
 .../main/alerts/alert_instances_controller.js   |     4 +-
 .../alerts/definition_configs_controller.js     |     9 +-
 .../alerts/manage_alert_groups_controller.js    |     4 +-
 .../manage_alert_notifications_controller.js    |     8 +-
 .../app/controllers/main/charts/heatmap.js      |    12 +-
 ambari-web/app/controllers/main/dashboard.js    |     4 +
 ambari-web/app/controllers/main/host/details.js |   114 +-
 ambari-web/app/controllers/main/service.js      |    63 +-
 .../controllers/main/service/add_controller.js  |     9 +-
 .../controllers/main/service/info/configs.js    |    26 +-
 .../controllers/main/service/info/summary.js    |    12 +-
 ambari-web/app/controllers/main/service/item.js |    32 +-
 .../service/manage_config_groups_controller.js  |     4 +-
 .../service/widgets/create/step2_controller.js  |     8 +-
 .../service/widgets/create/step3_controller.js  |    20 +-
 .../wizard/slave_component_groups_controller.js |    10 +-
 .../app/controllers/wizard/step2_controller.js  |     8 +-
 .../app/controllers/wizard/step3_controller.js  |     2 +-
 .../app/controllers/wizard/step6_controller.js  |    21 +-
 .../app/controllers/wizard/step7_controller.js  |   351 +-
 .../app/controllers/wizard/step8_controller.js  |    97 +-
 ambari-web/app/data/HDP2.3/site_properties.js   |    44 +-
 ambari-web/app/data/HDP2/site_properties.js     |     2 +-
 ambari-web/app/mappers.js                       |     3 +-
 .../mappers/alert_definition_summary_mapper.js  |     3 +-
 .../app/mappers/alert_definitions_mapper.js     |     4 +
 .../app/mappers/alert_instances_mapper.js       |     1 +
 .../mappers/configs/config_versions_mapper.js   |   143 -
 .../configs/service_config_version_mapper.js    |   117 +
 .../configs/stack_config_properties_mapper.js   |     4 +-
 ambari-web/app/mappers/configs/themes_mapper.js |     1 +
 ambari-web/app/mappers/hosts_mapper.js          |     2 +
 .../mappers/service_config_version_mapper.js    |   116 -
 ambari-web/app/mappers/service_mapper.js        |     2 +
 ambari-web/app/messages.js                      |    11 +-
 .../app/mixins/common/configs/configs_loader.js |    17 +-
 .../app/mixins/common/configs/configs_saver.js  |     8 +-
 .../mixins/common/configs/enhanced_configs.js   |     8 +-
 ambari-web/app/mixins/common/serverValidator.js |     8 +-
 .../main/service/configs/config_overridable.js  |    11 +-
 .../unit_convert/base_unit_convert_mixin.js     |     2 +-
 ambari-web/app/models.js                        |     1 -
 ambari-web/app/models/alerts/alert_config.js    |     5 +-
 .../app/models/alerts/alert_definition.js       |    16 +-
 ambari-web/app/models/alerts/alert_instance.js  |    11 +-
 ambari-web/app/models/cluster_states.js         |     5 +-
 ambari-web/app/models/configs/config_group.js   |     4 +-
 .../app/models/configs/config_property.js       |   236 -
 ambari-web/app/models/configs/config_version.js |    34 -
 .../configs/objects/service_config_category.js  |    13 +-
 .../configs/objects/service_config_property.js  |    18 +-
 .../models/configs/service_config_version.js    |    25 +-
 .../app/models/configs/stack_config_property.js |     2 +-
 ambari-web/app/models/configs/theme/section.js  |     8 +-
 .../app/models/configs/theme/sub_section.js     |    20 +-
 .../app/models/configs/theme/sub_section_tab.js |    10 +-
 ambari-web/app/models/configs/theme/tab.js      |     5 +-
 ambari-web/app/models/host.js                   |     5 +-
 ambari-web/app/models/host_component.js         |    33 +-
 ambari-web/app/models/host_stack_version.js     |     4 +-
 ambari-web/app/models/root_service.js           |     8 +-
 ambari-web/app/models/service.js                |     4 +-
 ambari-web/app/models/stack_service.js          |    25 +-
 .../app/models/stack_service_component.js       |     4 +-
 .../models/stack_version/repository_version.js  |     8 +-
 ambari-web/app/models/stack_version/version.js  |     4 +-
 ambari-web/app/models/widget_property.js        |     4 +-
 ambari-web/app/router.js                        |    82 +-
 ambari-web/app/routes/add_kerberos_routes.js    |    34 +-
 ambari-web/app/routes/main.js                   |     9 +-
 ambari-web/app/styles/application.less          |     6 +-
 .../modal_popups/cluster_check_dialog.hbs       |    24 +-
 ambari-web/app/templates/common/settings.hbs    |    55 +
 .../main/host/details/deleteComponentPopup.hbs  |    19 +-
 .../main/service/all_services_actions.hbs       |     8 +
 ambari-web/app/utils/ajax/ajax.js               |     6 +-
 ambari-web/app/utils/blueprint.js               |    55 +-
 ambari-web/app/utils/config.js                  |   104 +-
 .../app/utils/configs/config_initializer.js     |    21 +-
 ambari-web/app/utils/credentials.js             |    18 +-
 ambari-web/app/utils/ember_computed.js          |   681 +-
 ambari-web/app/utils/validator.js               |    10 +
 .../common/ajax_default_error_popup_body.js     |     8 +-
 .../app/views/common/chart/linear_time.js       |     4 +-
 ambari-web/app/views/common/chart/pie.js        |     4 +-
 .../views/common/configs/config_history_flow.js |    28 +-
 .../app/views/common/configs/controls_view.js   |    14 +-
 .../notification_configs_view.js                |     4 +-
 .../views/common/configs/service_config_view.js |     3 +-
 .../configs/service_configs_by_category_view.js |     6 +-
 .../configs/widgets/config_widget_view.js       |     8 +-
 .../configs/widgets/plain_config_text_field.js  |     4 +-
 .../widgets/slider_config_widget_view.js        |    43 +-
 .../widgets/test_db_connection_widget_view.js   |    22 +-
 ambari-web/app/views/common/controls_view.js    |    32 +-
 .../app/views/common/form/spinner_input_view.js |     4 +-
 .../common/modal_popups/cluster_check_popup.js  |   107 +-
 .../common/modal_popups/invalid_KDC_popup.js    |     5 +-
 .../app/views/common/rolling_restart_view.js    |    17 +-
 ambari-web/app/views/common/table_view.js       |     4 +-
 ambari-web/app/views/common/time_range.js       |     8 +-
 .../highAvailability/nameNode/step1_view.js     |     4 +-
 .../stack_upgrade/failed_hosts_modal_view.js    |     4 +-
 .../main/admin/stack_upgrade/services_view.js   |    12 +-
 .../admin/stack_upgrade/upgrade_wizard_view.js  |    15 +-
 .../main/admin/stack_upgrade/versions_view.js   |     4 +-
 .../app/views/main/alert_definitions_view.js    |     8 +-
 .../main/alerts/definition_details_view.js      |     4 +-
 .../views/main/dashboard/config_history_view.js |    18 +-
 .../dashboard/widgets/cluster_metrics_widget.js |     4 +-
 .../main/dashboard/widgets/flume_agent_live.js  |     4 +-
 .../views/main/dashboard/widgets/hbase_links.js |     4 +-
 .../views/main/dashboard/widgets/hdfs_links.js  |     4 +-
 ambari-web/app/views/main/host.js               |     8 +-
 .../main/host/details/host_component_view.js    |     2 +-
 .../host_component_views/datanode_view.js       |    30 +-
 .../app/views/main/host/host_alerts_view.js     |     4 +-
 .../app/views/main/host/stack_versions_view.js  |     4 +-
 ambari-web/app/views/main/host/summary.js       |     8 +-
 ambari-web/app/views/main/menu.js               |     4 +-
 ambari-web/app/views/main/service/menu.js       |    12 +-
 .../views/main/service/reassign/step4_view.js   |    14 +-
 .../views/main/service/reassign/step6_view.js   |    14 +-
 .../app/views/main/service/services/hbase.js    |     4 +-
 .../app/views/main/service/services/storm.js    |     4 +-
 .../service/widgets/create/expression_view.js   |     8 +-
 ambari-web/app/views/wizard/step1_view.js       |     4 +-
 ambari-web/app/views/wizard/step9_view.js       |     4 +-
 .../global/cluster_controller_test.js           |    42 +-
 .../admin/kerberos/step4_controller_test.js     |     4 +-
 .../controllers/main/admin/kerberos_test.js     |     4 +-
 .../admin/serviceAccounts_controller_test.js    |    82 -
 .../admin/stack_and_upgrade_controller_test.js  |     2 +-
 .../test/controllers/main/host/details_test.js  |    68 +-
 .../test/controllers/main/service_test.js       |   103 +
 .../test/controllers/wizard/step2_test.js       |    10 +-
 .../test/controllers/wizard/step6_test.js       |    10 +-
 .../test/controllers/wizard/step7_test.js       |   233 +-
 .../test/controllers/wizard/step8_test.js       |   202 +-
 .../test/data/HDP2.2/site_properties_test.js    |     2 +-
 .../test/data/HDP2.3/site_properties_test.js    |     5 +-
 .../test/data/HDP2/site_properties_test.js      |     2 +-
 .../configs/config_versions_mapper_test.js      |   119 -
 .../service_config_version_mapper_test.js       |   119 +
 .../test/mappers/configs/themes_mapper_test.js  |     1 -
 .../test/models/alerts/alert_definition_test.js |     4 +-
 .../test/models/configs/config_property_test.js |    66 -
 ambari-web/test/models/host_component_test.js   |    25 +
 ambari-web/test/models/stack_service_test.js    |    15 +-
 ambari-web/test/utils/blueprint_test.js         |    39 +-
 ambari-web/test/utils/config_test.js            |   119 +-
 ambari-web/test/utils/ember_computed_test.js    |   958 +-
 .../common/configs/service_config_view_test.js  |    16 +-
 .../widgets/slider_config_widget_view_test.js   |    58 +-
 .../widgets/time_interval_spinner_view_test.js  |     4 +-
 .../modal_popups/cluster_check_popup_test.js    |   271 +
 .../stack_upgrade/upgrade_wizard_view_test.js   |     8 +
 ambari-web/test/views/main/service/item_test.js |    77 +-
 .../ambari-scom-server/conf/ambari.properties   |     1 -
 .../org/apache/ambari/scom/AmbariServer.java    |     2 +-
 .../src/test/resources/ambari.properties        |     3 +-
 .../ambari/fast_hdfs_resource/Resource.java     |     2 +-
 contrib/views/capacity-scheduler/pom.xml        |     5 +
 .../capacityscheduler/PropertyValidator.java    |    28 +-
 .../PropertyValidatorTest.java                  |    34 +
 .../view/hive/resources/files/FileService.java  |     9 +-
 .../jobs/ResultsPaginationController.java       |    16 +-
 .../hive/resources/uploads/UploadService.java   |     5 +-
 .../ui/hive-web/app/components/query-editor.js  |    16 +-
 .../hive-web/app/components/typeahead-widget.js |    15 +
 .../ui/hive-web/app/controllers/index.js        |    10 +-
 .../hive-web/app/helpers/format-column-type.js  |    39 +
 .../ui/hive-web/app/services/database.js        |    12 +-
 .../resources/ui/hive-web/app/styles/app.scss   |     5 +
 .../hive-web/app/templates/databases-tree.hbs   |     2 +-
 .../ui/hive-web/app/templates/settings.hbs      |    21 +-
 .../ui/hive-web/app/templates/upload-table.hbs  |     8 +-
 .../ui/hive-web/app/utils/constants.js          |     5 +
 .../src/main/resources/ui/hive-web/bower.json   |     4 +-
 .../tests/unit/controllers/index-test.js        |    50 +
 .../vendor/codemirror/codemirror-min.js         |     2 +-
 .../hive/resources/files/FileServiceTest.java   |    54 +
 .../resources/ui/app/models/config_property.js  |     3 +
 .../ui/app/templates/common/config.hbs          |     2 +-
 dev-support/docker/docker/Dockerfile            |     3 +-
 docs/pom.xml                                    |     8 +
 794 files changed, 38718 insertions(+), 6469 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6ae63efd/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/RepositoryVersionDAO.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/6ae63efd/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
index 7abaa04,b1e54de..e12e118
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepositoryVersionEntity.java
@@@ -17,19 -17,13 +17,20 @@@
   */
  package org.apache.ambari.server.orm.entities;
  
 +import java.util.ArrayList;
 +import java.util.Collection;
  import java.util.Collections;
  import java.util.List;
+ import java.util.Set;
  
  import javax.persistence.CascadeType;
 +import javax.persistence.CollectionTable;
  import javax.persistence.Column;
 +import javax.persistence.ElementCollection;
 +import javax.persistence.Embeddable;
  import javax.persistence.Entity;
 +import javax.persistence.EnumType;
 +import javax.persistence.Enumerated;
  import javax.persistence.GeneratedValue;
  import javax.persistence.GenerationType;
  import javax.persistence.Id;
@@@ -101,20 -94,12 +102,20 @@@ public class RepositoryVersionEntity 
    @Column(name = "repositories")
    private String operatingSystems;
  
 +  @Column(name = "repo_type", nullable = false, insertable = true, updatable = true)
 +  @Enumerated(value = EnumType.STRING)
 +  private RepositoryType type = RepositoryType.STANDARD;
 +
    @OneToMany(cascade = CascadeType.REMOVE, mappedBy = "repositoryVersion")
-   private Collection<ClusterVersionEntity> clusterVersionEntities;
+   private Set<ClusterVersionEntity> clusterVersionEntities;
  
    @OneToMany(cascade = CascadeType.REMOVE, mappedBy = "repositoryVersion")
-   private Collection<HostVersionEntity> hostVersionEntities;
+   private Set<HostVersionEntity> hostVersionEntities;
  
 +  @ElementCollection(targetClass = Component.class)
 +  @CollectionTable(name = "repo_version_component", joinColumns = @JoinColumn(name = "repo_version_id"))
 +  private List<Component> components = new ArrayList<>();
 +
    // ----- RepositoryVersionEntity -------------------------------------------------------
  
    public RepositoryVersionEntity() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/6ae63efd/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 92df281,7ced845..fc1e274
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@@ -1219,9 -1221,10 +1221,10 @@@ public class ClusterImpl implements Clu
     * UPGRADE_FAILED: at least one host in UPGRADE_FAILED
     * UPGRADED: all hosts are UPGRADED
     * UPGRADING: at least one host is UPGRADING, and the rest in UPGRADING|INSTALLED
-    * INSTALLED: all hosts in INSTALLED -OR- INSTALLED and NOT_REQUIRED
+    * UPGRADING: at least one host is UPGRADED, and the rest in UPGRADING|INSTALLED
+    * INSTALLED: all hosts in INSTALLED
     * INSTALL_FAILED: at least one host in INSTALL_FAILED
 -   * INSTALLING: all hosts in INSTALLING. Notice that if one host is CURRENT and another is INSTALLING, then the
 +   * INSTALLING: all hosts in INSTALLING -or- INSTALLING and NOT_REQUIRED. Notice that if one host is CURRENT and another is INSTALLING, then the
     * effective version will be OUT_OF_SYNC.
     * OUT_OF_SYNC: otherwise
     * @param stateToHosts Map from state to the collection of hosts with that state
@@@ -1252,15 -1262,22 +1262,24 @@@
      if (stateToHosts.containsKey(RepositoryVersionState.INSTALLED) && stateToHosts.get(RepositoryVersionState.INSTALLED).size() == totalHosts) {
        return RepositoryVersionState.INSTALLED;
      }
-     if (stateToHosts.containsKey(RepositoryVersionState.INSTALL_FAILED) && !stateToHosts.get(RepositoryVersionState.INSTALL_FAILED).isEmpty()) {
-       return RepositoryVersionState.INSTALL_FAILED;
+     if (stateToHosts.containsKey(RepositoryVersionState.INSTALL_FAILED) &&
+       !stateToHosts.get(RepositoryVersionState.INSTALL_FAILED).isEmpty()) {
+       // Installation failed on some host(s). But
+       // cluster version state should transition to Install Failed only after
+       // all hosts have finished installation. Otherwise, UI will misbehave
+       // (hide progress dialog before installation is finished)
+       if (! stateToHosts.containsKey(RepositoryVersionState.INSTALLING) ||
+         stateToHosts.get(RepositoryVersionState.INSTALLING).isEmpty()) {
+         return RepositoryVersionState.INSTALL_FAILED;
+       }
      }
  
 -    final int totalINSTALLING = stateToHosts.containsKey(RepositoryVersionState.INSTALLING) ? stateToHosts.get(RepositoryVersionState.INSTALLING).size() : 0;
 -    final int totalINSTALLED = stateToHosts.containsKey(RepositoryVersionState.INSTALLED) ? stateToHosts.get(RepositoryVersionState.INSTALLED).size() : 0;
 -    final int totalINSTALL_FAILED = stateToHosts.containsKey(RepositoryVersionState.INSTALL_FAILED) ? stateToHosts.get(RepositoryVersionState.INSTALL_FAILED).size() : 0;
 -    if (totalINSTALLING + totalINSTALLED + totalINSTALL_FAILED== totalHosts) {
 +    int totalInstalling = stateToHosts.containsKey(RepositoryVersionState.INSTALLING) ? stateToHosts.get(RepositoryVersionState.INSTALLING).size() : 0;
 +    int totalInstalled = stateToHosts.containsKey(RepositoryVersionState.INSTALLED) ? stateToHosts.get(RepositoryVersionState.INSTALLED).size() : 0;
 +    int totalNotRequired = stateToHosts.containsKey(RepositoryVersionState.NOT_REQUIRED) ? stateToHosts.get(RepositoryVersionState.NOT_REQUIRED).size() : 0;
++    int totalInstallFailed = stateToHosts.containsKey(RepositoryVersionState.INSTALL_FAILED) ? stateToHosts.get(RepositoryVersionState.INSTALL_FAILED).size() : 0;
 +
-     if (totalInstalling + totalInstalled == totalHosts) {
++    if (totalInstalling + totalInstalled + totalInstallFailed == totalHosts) {
        return RepositoryVersionState.INSTALLING;
      }
  

http://git-wip-us.apache.org/repos/asf/ambari/blob/6ae63efd/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/6ae63efd/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/6ae63efd/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/6ae63efd/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/6ae63efd/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/6ae63efd/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/6ae63efd/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProviderTest.java
----------------------------------------------------------------------


[31/50] ambari git commit: AMBARI-14040. Update cluster fails with NPE if the request contains null configuration property values (Sebastian Toader via alejandro)

Posted by nc...@apache.org.
AMBARI-14040. Update cluster fails with NPE if the request contains null configuration property values (Sebastian Toader via alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a62c4b8a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a62c4b8a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a62c4b8a

Branch: refs/heads/branch-dev-patch-upgrade
Commit: a62c4b8aad56fce846603b9ad45ceead0300ae29
Parents: 0ada28a
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Mon Nov 30 16:01:50 2015 -0800
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Mon Nov 30 16:01:50 2015 -0800

----------------------------------------------------------------------
 .../AmbariManagementControllerImpl.java         |  4 +-
 .../internal/StackAdvisorResourceProvider.java  |  8 +-
 .../AmbariManagementControllerImplTest.java     | 90 +++++++++++++++++++-
 .../StackAdvisorResourceProviderTest.java       | 40 +++++++++
 4 files changed, 134 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a62c4b8a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index 0e3e7b8..c0dc342 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -1432,8 +1432,8 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
             break;
           } else {
             for (Entry<String, String> property : requestConfigProperties.entrySet()) {
-              if (!StringUtils.equals(property.getValue(),clusterConfigProperties.get(property.getKey()))) {
-                isConfigurationCreationNeeded =true;
+              if (!StringUtils.equals(property.getValue(), clusterConfigProperties.get(property.getKey()))) {
+                isConfigurationCreationNeeded = true;
                 break;
               }
             }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a62c4b8a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProvider.java
index fe3d006..0ad9126 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProvider.java
@@ -266,8 +266,12 @@ public abstract class StackAdvisorResourceProvider extends ReadOnlyResourceProvi
             siteMap.put(propertiesProperty, propertiesMap);
           }
 
-          String value = properties.get(property).toString();
-          propertiesMap.put(propertyName, value);
+          Object propVal = properties.get(property);
+          if (propVal != null)
+            propertiesMap.put(propertyName, propVal.toString());
+          else
+            LOG.info(String.format("No value specified for configuration property, name = %s ", property));
+
         } catch (Exception e) {
           LOG.debug(String.format("Error handling configuration property, name = %s", property), e);
           // do nothing

http://git-wip-us.apache.org/repos/asf/ambari/blob/a62c4b8a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
index ca3ca36..e2ec5e0 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
@@ -18,6 +18,8 @@
 
 package org.apache.ambari.server.controller;
 
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Maps;
 import com.google.gson.Gson;
 import com.google.inject.Binder;
 import com.google.inject.Guice;
@@ -52,8 +54,10 @@ import org.apache.ambari.server.security.ldap.LdapBatchDto;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ComponentInfo;
+import org.apache.ambari.server.state.ConfigImpl;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.MaintenanceState;
+import org.apache.ambari.server.state.PropertyInfo;
 import org.apache.ambari.server.state.RepositoryInfo;
 import org.apache.ambari.server.state.SecurityType;
 import org.apache.ambari.server.state.Service;
@@ -85,7 +89,20 @@ import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOST_SYS_
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JAVA_VERSION;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_NAME;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_VERSION;
-import static org.easymock.EasyMock.*;
+import static org.easymock.EasyMock.anyBoolean;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.captureBoolean;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.createMockBuilder;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.eq;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.reset;
+import static org.easymock.EasyMock.verify;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
@@ -346,7 +363,7 @@ public class AmbariManagementControllerImplTest {
 
     expect(service.getName()).andReturn("service");
     expect(service.getServiceComponent("component")).andThrow(
-        new ServiceComponentNotFoundException("cluster", "service", "component"));
+      new ServiceComponentNotFoundException("cluster", "service", "component"));
     expect(service.getDesiredStackVersion()).andReturn(stackId);
     expect(stackId.getStackName()).andReturn("stack");
     expect(stackId.getStackVersion()).andReturn("1.0");
@@ -356,7 +373,7 @@ public class AmbariManagementControllerImplTest {
     expect(service.getServiceComponents()).andReturn(componentsMap);
     expect(component1.getServiceComponentHosts()).andReturn(Collections.EMPTY_MAP);
     expect(component2.getServiceComponentHosts()).andReturn(
-        Collections.<String, ServiceComponentHost>singletonMap("anyHost", null));
+      Collections.<String, ServiceComponentHost>singletonMap("anyHost", null));
 
     ServiceInfo serviceInfo = createNiceMock(ServiceInfo.class);
     ComponentInfo compInfo = createNiceMock(ComponentInfo.class);
@@ -394,7 +411,7 @@ public class AmbariManagementControllerImplTest {
     expect(service.getServiceComponents()).andReturn(componentsMap);
     expect(component1.getServiceComponentHosts()).andReturn(Collections.EMPTY_MAP);
     expect(component2.getServiceComponentHosts()).andReturn(
-        Collections.<String, ServiceComponentHost>singletonMap("anyHost", null));
+      Collections.<String, ServiceComponentHost>singletonMap("anyHost", null));
 
     ServiceInfo serviceInfo = createNiceMock(ServiceInfo.class);
     expect(serviceInfo.getClientComponent()).andReturn(null);
@@ -566,6 +583,71 @@ public class AmbariManagementControllerImplTest {
   }
 
   /**
+   * Ensure that processing update request does not fail on configuration
+   * properties with no value specified (no value = null reference value)
+   */
+  @Test
+  public void testUpdateClustersWithNullConfigPropertyValues() throws Exception {
+    // member state mocks
+    Capture<AmbariManagementController> controllerCapture = new Capture<AmbariManagementController>();
+    Injector injector = createStrictMock(Injector.class);
+    Cluster cluster = createNiceMock(Cluster.class);
+    ActionManager actionManager = createNiceMock(ActionManager.class);
+    ClusterRequest clusterRequest = createNiceMock(ClusterRequest.class);
+
+    // requests
+    Set<ClusterRequest> setRequests = Collections.singleton(clusterRequest);
+
+    KerberosHelper kerberosHelper = createStrictMock(KerberosHelper.class);
+    // expectations
+    injector.injectMembers(capture(controllerCapture));
+    expect(injector.getInstance(Gson.class)).andReturn(null);
+    expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(null);
+    expect(injector.getInstance(KerberosHelper.class)).andReturn(kerberosHelper);
+    expect(clusterRequest.getClusterName()).andReturn("clusterNew").anyTimes();
+    expect(clusterRequest.getClusterId()).andReturn(1L).anyTimes();
+
+    ConfigurationRequest configReq = new ConfigurationRequest();
+    final Map<String, String> configReqProps = Maps.newHashMap();
+    configReqProps.put("p1", null);
+    configReq.setProperties(configReqProps);
+
+    expect(clusterRequest.getDesiredConfig()).andReturn(ImmutableList.of(configReq)).anyTimes();
+    expect(clusters.getClusterById(1L)).andReturn(cluster).anyTimes();
+    expect(cluster.getClusterName()).andReturn("clusterOld").anyTimes();
+    expect(cluster.getConfigPropertiesTypes(anyObject(String.class))).andReturn(Maps.<PropertyInfo.PropertyType, Set<String>>newHashMap()).anyTimes();
+    expect(cluster.getDesiredConfigByType(anyObject(String.class))).andReturn(new ConfigImpl("config-type") {
+      @Override
+      public Map<String, Map<String, String>> getPropertiesAttributes() {
+        return Maps.newHashMap();
+      }
+
+      @Override
+      public Map<String, String> getProperties() {
+        return configReqProps;
+      }
+
+    }).anyTimes();
+
+    cluster.addSessionAttributes(anyObject(Map.class));
+    expectLastCall().once();
+
+    cluster.setClusterName("clusterNew");
+    expectLastCall();
+
+    // replay mocks
+    replay(actionManager, cluster, clusters, injector, clusterRequest, sessionManager);
+
+    // test
+    AmbariManagementController controller = new AmbariManagementControllerImpl(actionManager, clusters, injector);
+    controller.updateClusters(setRequests, null);
+
+    // assert and verify
+    assertSame(controller, controllerCapture.getValue());
+    verify(actionManager, cluster, clusters, injector, clusterRequest, sessionManager);
+  }
+
+  /**
    * Ensure that when the cluster is updated KerberosHandler.toggleKerberos is not invoked unless
    * the security type is altered
    */

http://git-wip-us.apache.org/repos/asf/ambari/blob/a62c4b8a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProviderTest.java
index 8c5337b..e3b89b8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackAdvisorResourceProviderTest.java
@@ -33,6 +33,7 @@ import java.util.Set;
 
 import static org.apache.ambari.server.controller.internal.StackAdvisorResourceProvider.CONFIGURATIONS_PROPERTY_ID;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.mock;
@@ -73,4 +74,43 @@ public class StackAdvisorResourceProviderTest {
     assertEquals("string", properties.get("string_prop"));
     assertEquals("[array1, array2]", properties.get("array_prop"));
   }
+
+  @Test
+  public void testCalculateConfigurationsWithNullPropertyValues() throws Exception {
+
+    Map<Resource.Type, String> keyPropertyIds = Collections.emptyMap();
+    Set<String> propertyIds = Collections.emptySet();
+    AmbariManagementController ambariManagementController = mock(AmbariManagementController.class);
+    RecommendationResourceProvider provider = new RecommendationResourceProvider(propertyIds,
+      keyPropertyIds, ambariManagementController);
+
+    Request request = mock(Request.class);
+    Set<Map<String, Object>> propertiesSet = new HashSet<Map<String, Object>>();
+    Map<String, Object> propertiesMap = new HashMap<String, Object>();
+    propertiesMap.put(CONFIGURATIONS_PROPERTY_ID + "site/properties/string_prop", null); //null value means no value specified for the property
+    List<Object> array = new ArrayList<Object>();
+    array.add("array1");
+    array.add("array2");
+    propertiesMap.put(CONFIGURATIONS_PROPERTY_ID + "site/properties/array_prop", array);
+    propertiesSet.add(propertiesMap);
+
+    doReturn(propertiesSet).when(request).getProperties();
+
+    Map<String, Map<String, Map<String, String>>> calculatedConfigurations = provider.calculateConfigurations(request);
+
+    assertNotNull(calculatedConfigurations);
+    assertEquals(1, calculatedConfigurations.size());
+    Map<String, Map<String, String>> site = calculatedConfigurations.get("site");
+    assertNotNull(site);
+    assertEquals(1, site.size());
+    Map<String, String> properties = site.get("properties");
+    assertNotNull(properties);
+
+    assertEquals("[array1, array2]", properties.get("array_prop"));
+
+
+    // config properties with null values should be ignored
+    assertFalse(properties.containsKey("string_prop"));
+
+  }
 }


[21/50] ambari git commit: AMBARI-14115. Need IDs or classes for input fields in slider view Create App Configuration step

Posted by nc...@apache.org.
AMBARI-14115. Need IDs or classes for input fields in slider view Create App Configuration step


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/78bef69a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/78bef69a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/78bef69a

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 78bef69aa58bdb386aafc6c30ea880c5ccadc7a0
Parents: 1eb6407
Author: Alex Antonenko <hi...@gmail.com>
Authored: Mon Nov 30 17:56:59 2015 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Mon Nov 30 17:59:37 2015 +0200

----------------------------------------------------------------------
 .../slider/src/main/resources/ui/app/models/config_property.js    | 3 +++
 .../slider/src/main/resources/ui/app/templates/common/config.hbs  | 2 +-
 2 files changed, 4 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/78bef69a/contrib/views/slider/src/main/resources/ui/app/models/config_property.js
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/models/config_property.js b/contrib/views/slider/src/main/resources/ui/app/models/config_property.js
index aa7bbc8..cb92aad 100644
--- a/contrib/views/slider/src/main/resources/ui/app/models/config_property.js
+++ b/contrib/views/slider/src/main/resources/ui/app/models/config_property.js
@@ -36,6 +36,9 @@ App.ConfigProperty = Em.Object.extend({
         return Em.TextField;
     }
   }.property('viewType'),
+  className: function () {
+    return "value-for-" + this.get('label').replace('.', '-');
+  }.property('viewType'),
   readOnly: false,
   //used for config with "select" view
   options: [],

http://git-wip-us.apache.org/repos/asf/ambari/blob/78bef69a/contrib/views/slider/src/main/resources/ui/app/templates/common/config.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/resources/ui/app/templates/common/config.hbs b/contrib/views/slider/src/main/resources/ui/app/templates/common/config.hbs
index 348cae0..fa11a23 100644
--- a/contrib/views/slider/src/main/resources/ui/app/templates/common/config.hbs
+++ b/contrib/views/slider/src/main/resources/ui/app/templates/common/config.hbs
@@ -24,7 +24,7 @@
       {{view config.view
         value=config.value
         content=config.options
-        class="form-control"
+        classBinding=":form-control :slider-wiz-config-value config.className"
         disabled=config.readOnly
       }}
     </div>


[30/50] ambari git commit: AMBARI-13988. Hive View : Upload Table Tab : if selected database is default then data does not show in table. (Nitiraj Rathore via yusaku)

Posted by nc...@apache.org.
AMBARI-13988. Hive View : Upload Table Tab : if selected database is default then data does not show in table. (Nitiraj Rathore via yusaku)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0ada28ab
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0ada28ab
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0ada28ab

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 0ada28abc97e1b54833b98340a43e09c948a7e25
Parents: 89c6f26
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Mon Nov 30 15:56:13 2015 -0800
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Mon Nov 30 15:56:13 2015 -0800

----------------------------------------------------------------------
 .../ambari/view/hive/resources/uploads/UploadService.java       | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0ada28ab/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadService.java
index 2098a5f..8b5b851 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadService.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/uploads/UploadService.java
@@ -64,6 +64,7 @@ public class UploadService extends BaseService {
 
   final private String HIVE_META_STORE_LOCATION_KEY = "hive.metastore.warehouse.dir";
   final private String HIVE_SITE = "hive-site";
+  final private String HIVE_DEFAULT_DB = "default";
 
   @POST
   @Path("/preview")
@@ -173,7 +174,8 @@ public class UploadService extends BaseService {
     createdJobController.submit();
     getResourceManager().saveIfModified(createdJobController);
 
-    String filePath = databaseName + ".db/" + tableName + "/" + tableName + ".csv";
+    String filePath = (databaseName == null || databaseName.equals(HIVE_DEFAULT_DB)) ? "" : databaseName + ".db/";
+    filePath += tableName + "/" + tableName + ".csv";
 
     JSONObject jobObject = new JSONObject();
     jobObject.put("jobId", job.getId());
@@ -236,7 +238,6 @@ public class UploadService extends BaseService {
       br.readLine(); // TODO : remove the header line. Wrong if first record is beyond first endline
     }
 
-
     String basePath = getHiveMetaStoreLocation();
     if (null == basePath)
       basePath = "/apps/hive/warehouse";


[19/50] ambari git commit: AMBARI-14110. Clean up stop agent to not print unnecessary error message.(vbrodetskyi)

Posted by nc...@apache.org.
AMBARI-14110. Clean up stop agent to not print unnecessary error message.(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c4d5ff90
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c4d5ff90
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c4d5ff90

Branch: refs/heads/branch-dev-patch-upgrade
Commit: c4d5ff90c1946cfbff81200bff2376b372bc7fff
Parents: 85c7515
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Mon Nov 30 09:51:17 2015 +0200
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Mon Nov 30 09:51:17 2015 +0200

----------------------------------------------------------------------
 ambari-agent/src/main/python/ambari_agent/main.py     | 2 +-
 ambari-agent/src/test/python/ambari_agent/TestMain.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c4d5ff90/ambari-agent/src/main/python/ambari_agent/main.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/main.py b/ambari-agent/src/main/python/ambari_agent/main.py
index 74697c5..731cbfc 100644
--- a/ambari-agent/src/main/python/ambari_agent/main.py
+++ b/ambari-agent/src/main/python/ambari_agent/main.py
@@ -189,7 +189,7 @@ def stop_agent():
       res = runner.run([AMBARI_SUDO_BINARY, 'kill', '-9', str(pid)])
       if res['exitCode'] != 0:
         raise Exception("Error while performing agent stop. " + res['error'] + res['output'])
-    sys.exit(1)
+    sys.exit(0)
 
 def reset_agent(options):
   try:

http://git-wip-us.apache.org/repos/asf/ambari/blob/c4d5ff90/ambari-agent/src/test/python/ambari_agent/TestMain.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestMain.py b/ambari-agent/src/test/python/ambari_agent/TestMain.py
index 696e597..d044027 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestMain.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestMain.py
@@ -232,7 +232,7 @@ class TestMain(unittest.TestCase):
       main.stop_agent()
       kill_mock.assert_any_call(['ambari-sudo.sh', 'kill', '-15', pid])
       kill_mock.assert_any_call(['ambari-sudo.sh', 'kill', '-9', pid])
-      sys_exit_mock.assert_called_with(1)
+      sys_exit_mock.assert_called_with(0)
 
     # Restore
     ProcessHelper.pidfile = oldpid


[14/50] ambari git commit: AMBARI-14108. AMS Collector does not start with 2.1.2 input and env scripts (Aravindan Vijayan via smohanty)

Posted by nc...@apache.org.
AMBARI-14108. AMS Collector does not start with 2.1.2 input and env scripts (Aravindan Vijayan via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f8013699
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f8013699
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f8013699

Branch: refs/heads/branch-dev-patch-upgrade
Commit: f801369917633d357be0e59c645c9979e5686287
Parents: 20a9ba1
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Sun Nov 29 08:08:20 2015 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Sun Nov 29 08:08:20 2015 -0800

----------------------------------------------------------------------
 .../common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f8013699/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
index 85e9cb3..68a3daa 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
@@ -99,11 +99,13 @@ master_heapsize = config['configurations']['ams-hbase-env']['hbase_master_heapsi
 regionserver_heapsize = config['configurations']['ams-hbase-env']['hbase_regionserver_heapsize']
 
 # Check if hbase java options already have appended "m". If Yes, remove the trailing m.
+metrics_collector_heapsize = int(trim_heap_property(str(metrics_collector_heapsize), "m"))
 master_heapsize = int(trim_heap_property(str(master_heapsize), "m"))
 regionserver_heapsize = int(trim_heap_property(str(regionserver_heapsize), "m"))
 
 regionserver_xmn_max = default('/configurations/ams-hbase-env/hbase_regionserver_xmn_max', None)
 if regionserver_xmn_max:
+  regionserver_xmn_max = int(trim_heap_property(str(regionserver_xmn_max), "m"))
   regionserver_xmn_percent = config['configurations']['ams-hbase-env']['hbase_regionserver_xmn_ratio']
   regionserver_xmn_size = calc_xmn_from_xms(regionserver_heapsize, regionserver_xmn_percent, regionserver_xmn_max)
 else:


[15/50] ambari git commit: AMBARI-14107. Stack advisor error for validateAmsHbaseEnvConfigurations (Aravindan Vijayan via smohanty)

Posted by nc...@apache.org.
AMBARI-14107. Stack advisor error for validateAmsHbaseEnvConfigurations (Aravindan Vijayan via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d8a42253
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d8a42253
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d8a42253

Branch: refs/heads/branch-dev-patch-upgrade
Commit: d8a42253fdbad9c8d11ecbee4d267d67f4b28928
Parents: f801369
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Sun Nov 29 08:17:59 2015 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Sun Nov 29 08:24:18 2015 -0800

----------------------------------------------------------------------
 .../stacks/HDP/2.0.6/services/stack_advisor.py  | 94 +++++++++++---------
 1 file changed, 51 insertions(+), 43 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d8a42253/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index 6031fbd..d4d0e83 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -914,8 +914,16 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
     gb = 1024 * mb
 
     regionServerItem = self.validatorLessThenDefaultValue(properties, recommendedDefaults, "hbase_regionserver_heapsize") ## FIXME if new service added
+    if regionServerItem:
+      validationItems.extend([{"config-name": "hbase_regionserver_heapsize", "item": regionServerItem}])
+
     hbaseMasterHeapsizeItem = self.validatorLessThenDefaultValue(properties, recommendedDefaults, "hbase_master_heapsize")
+    if hbaseMasterHeapsizeItem:
+      validationItems.extend([{"config-name": "hbase_master_heapsize", "item": hbaseMasterHeapsizeItem}])
+
     logDirItem = self.validatorEqualsPropertyItem(properties, "hbase_log_dir", ams_env, "metrics_collector_log_dir")
+    if logDirItem:
+      validationItems.extend([{"config-name": "hbase_log_dir", "item": logDirItem}])
 
     collector_heapsize = to_number(ams_env.get("metrics_collector_heapsize"))
     hbase_master_heapsize = to_number(properties["hbase_master_heapsize"])
@@ -962,6 +970,11 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
         masterXmnItem = self.getWarnItem("Value is greater than the recommended maximum Xmn size of {0} "
                                          "(20% of hbase_master_heapsize + hbase_regionserver_heapsize)"
                                          .format(int(math.floor(maxMasterXmn))))
+    if masterXmnItem:
+      validationItems.extend([{"config-name": "hbase_master_xmn_size", "item": masterXmnItem}])
+
+    if regionServerXmnItem:
+      validationItems.extend([{"config-name": "regionserver_xmn_size", "item": regionServerXmnItem}])
 
     if hbaseMasterHeapsizeItem is None:
       hostMasterComponents = {}
@@ -988,52 +1001,47 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
                                   "Ambari Metrics Collector component and ensure " \
                                   "the host has sufficient memory available."
 
-              hbaseMasterHeapsizeItem = self.getWarnItem(
-                masterHostMessage.format(
+              hbaseMasterHeapsizeItem = self.getWarnItem(masterHostMessage.format(
                   collectorHostName, str(", ".join(hostMasterComponents[collectorHostName]))))
-
-          # Check for unused RAM on AMS Collector node
-          hostComponents = []
-          for service in services["services"]:
-            for component in service["components"]:
-              if component["StackServiceComponents"]["hostnames"] is not None:
-                if collectorHostName in component["StackServiceComponents"]["hostnames"]:
-                  hostComponents.append(component["StackServiceComponents"]["component_name"])
-
-          requiredMemory = getMemorySizeRequired(hostComponents, configurations)
-          unusedMemory = host["Hosts"]["total_mem"] * 1024 - requiredMemory # in bytes
-          if unusedMemory > 4*gb:  # warn user, if more than 4GB RAM is unused
-            heapPropertyToIncrease = "hbase_regionserver_heapsize" if is_hbase_distributed else "hbase_master_heapsize"
-            xmnPropertyToIncrease = "regionserver_xmn_size" if is_hbase_distributed else "hbase_master_xmn_size"
-            recommended_collector_heapsize = int((unusedMemory - 4*gb)/5) + collector_heapsize*mb
-            recommended_hbase_heapsize = int((unusedMemory - 4*gb)*4/5) + to_number(properties.get(heapPropertyToIncrease))*mb
-            recommended_hbase_heapsize = min(32*gb, recommended_hbase_heapsize) #Make sure heapsize <= 32GB
-            recommended_xmn_size = round_to_n(0.12*recommended_hbase_heapsize/mb,128)
-
-            if collector_heapsize < recommended_collector_heapsize:
-              collectorHeapsizeItem = self.getWarnItem("{0} MB RAM is unused on the host {1} based on "
-                                                       "components assigned. Consider allocating {2} MB"
-                                        .format(unusedMemory/mb, collectorHostName, recommended_collector_heapsize/mb))
-              validationItems.extend([{"config-name": "metrics_collector_heapsize", "item": collectorHeapsizeItem}])
-
-            if to_number(properties[heapPropertyToIncreaseItem]) < recommended_hbase_heapsize:
-              heapPropertyToIncreaseItem = self.getWarnItem("Consider allocating {0} MB to use up some unused memory "
-                                                            "on host".format(recommended_hbase_heapsize/mb))
-              validationItems.extend([{"config-name": heapPropertyToIncrease, "item": heapPropertyToIncreaseItem}])
-
-            if to_number(properties[xmnPropertyToIncrease]) < recommended_hbase_heapsize:
-              xmnPropertyToIncreaseItem = self.getWarnItem("Consider allocating {0} MB to use up some unused memory "
-                                                           "on host".format(recommended_xmn_size))
-              validationItems.extend([{"config-name": xmnPropertyToIncrease, "item": xmnPropertyToIncreaseItem}])
+              if hbaseMasterHeapsizeItem:
+                validationItems.extend([{"config-name": "hbase_master_heapsize", "item": hbaseMasterHeapsizeItem}])
+
+            # Check for unused RAM on AMS Collector node
+            hostComponents = []
+            for service in services["services"]:
+              for component in service["components"]:
+                if component["StackServiceComponents"]["hostnames"] is not None:
+                  if collectorHostName in component["StackServiceComponents"]["hostnames"]:
+                    hostComponents.append(component["StackServiceComponents"]["component_name"])
+
+            requiredMemory = getMemorySizeRequired(hostComponents, configurations)
+            unusedMemory = host["Hosts"]["total_mem"] * 1024 - requiredMemory # in bytes
+            if unusedMemory > 4*gb:  # warn user, if more than 4GB RAM is unused
+              heapPropertyToIncrease = "hbase_regionserver_heapsize" if is_hbase_distributed else "hbase_master_heapsize"
+              xmnPropertyToIncrease = "regionserver_xmn_size" if is_hbase_distributed else "hbase_master_xmn_size"
+              recommended_collector_heapsize = int((unusedMemory - 4*gb)/5) + collector_heapsize*mb
+              recommended_hbase_heapsize = int((unusedMemory - 4*gb)*4/5) + to_number(properties.get(heapPropertyToIncrease))*mb
+              recommended_hbase_heapsize = min(32*gb, recommended_hbase_heapsize) #Make sure heapsize <= 32GB
+              recommended_xmn_size = round_to_n(0.12*recommended_hbase_heapsize/mb,128)
+
+              if collector_heapsize < recommended_collector_heapsize or \
+                  to_number(properties[heapPropertyToIncrease]) < recommended_hbase_heapsize:
+                collectorHeapsizeItem = self.getWarnItem("{0} MB RAM is unused on the host {1} based on components " \
+                                                         "assigned. Consider allocating  {2} MB to " \
+                                                         "metrics_collector_heapsize in ams-env, " \
+                                                         "{3} MB to {4} in ams-hbase-env"
+                                                         .format(unusedMemory/mb, collectorHostName,
+                                                                 recommended_collector_heapsize/mb,
+                                                                 recommended_hbase_heapsize/mb,
+                                                                 heapPropertyToIncrease))
+                validationItems.extend([{"config-name": heapPropertyToIncrease, "item": collectorHeapsizeItem}])
+
+              if to_number(properties[xmnPropertyToIncrease]) < recommended_hbase_heapsize:
+                xmnPropertyToIncreaseItem = self.getWarnItem("Consider allocating {0} MB to use up some unused memory "
+                                                             "on host".format(recommended_xmn_size))
+                validationItems.extend([{"config-name": xmnPropertyToIncrease, "item": xmnPropertyToIncreaseItem}])
       pass
 
-    validationItems.extend([
-      {"config-name": "hbase_regionserver_heapsize", "item": regionServerItem},
-      {"config-name": "hbase_master_heapsize", "item": hbaseMasterHeapsizeItem},
-      {"config-name": "hbase_log_dir", "item": logDirItem},
-      {"config-name": "hbase_master_xmn_size", "item": masterXmnItem},
-      {"config-name": "regionserver_xmn_size", "item": regionServerXmnItem}
-    ])
     return self.toConfigurationValidationProblems(validationItems, "ams-hbase-env")
 
 


[26/50] ambari git commit: AMBARI-13943. Ambari LDAP integration cannot handle LDAP directories with multiple entries for the same user. (Oliver Szabo via rnettleton)

Posted by nc...@apache.org.
AMBARI-13943. Ambari LDAP integration cannot handle LDAP directories with multiple entries for the same user. (Oliver Szabo via rnettleton)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fd1181f4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fd1181f4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fd1181f4

Branch: refs/heads/branch-dev-patch-upgrade
Commit: fd1181f474e39492653fc70ccbf86d31f192a1e6
Parents: 93bf1d3
Author: Bob Nettleton <rn...@hortonworks.com>
Authored: Mon Nov 30 14:02:12 2015 -0500
Committer: Bob Nettleton <rn...@hortonworks.com>
Committed: Mon Nov 30 14:02:35 2015 -0500

----------------------------------------------------------------------
 .../server/security/authorization/LdapServerProperties.java    | 6 ++++--
 .../security/authorization/LdapServerPropertiesTest.java       | 5 +++--
 2 files changed, 7 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/fd1181f4/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/LdapServerProperties.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/LdapServerProperties.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/LdapServerProperties.java
index f28ee50..8eeaf35 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/LdapServerProperties.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/LdapServerProperties.java
@@ -53,7 +53,7 @@ public class LdapServerProperties {
   private String userSearchBase = "";
 
   private String groupSearchFilter;
-  private static final String userSearchFilter = "({attribute}={0})";
+  private static final String userSearchFilter = "(&({attribute}={0})(objectClass={userObjectClass}))";
 
   //LDAP pagination properties
   private boolean paginationEnabled = true;
@@ -138,7 +138,9 @@ public class LdapServerProperties {
   }
 
   public String getUserSearchFilter() {
-    return userSearchFilter.replace("{attribute}", usernameAttribute);
+    return userSearchFilter
+      .replace("{attribute}", usernameAttribute)
+      .replace("{userObjectClass}", userObjectClass);
   }
 
   public String getUsernameAttribute() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd1181f4/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/LdapServerPropertiesTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/LdapServerPropertiesTest.java b/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/LdapServerPropertiesTest.java
index 9043439..0797239 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/LdapServerPropertiesTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/LdapServerPropertiesTest.java
@@ -57,6 +57,7 @@ public class LdapServerPropertiesTest {
     ldapServerProperties.setUseSsl(false);
     ldapServerProperties.setPrimaryUrl("1.2.3.4:389");
     ldapServerProperties.setUsernameAttribute("uid");
+    ldapServerProperties.setUserObjectClass("dummyObjectClass");
   }
 
   @Test
@@ -76,9 +77,9 @@ public class LdapServerPropertiesTest {
 
   @Test
   public void testGetUserSearchFilter() throws Exception {
-    assertEquals(INCORRECT_USER_SEARCH_FILTER, "(uid={0})", ldapServerProperties.getUserSearchFilter());
+    assertEquals(INCORRECT_USER_SEARCH_FILTER, "(&(uid={0})(objectClass=dummyObjectClass))", ldapServerProperties.getUserSearchFilter());
     ldapServerProperties.setUsernameAttribute("anotherName");
-    assertEquals(INCORRECT_USER_SEARCH_FILTER, "(anotherName={0})", ldapServerProperties.getUserSearchFilter());
+    assertEquals(INCORRECT_USER_SEARCH_FILTER, "(&(anotherName={0})(objectClass=dummyObjectClass))", ldapServerProperties.getUserSearchFilter());
   }
 
   @Test


[28/50] ambari git commit: AMBARI-14104. Recommendations not applied if cluster creation template does not contain all of the hostgroups. (Oliver Szabo via rnettleton)

Posted by nc...@apache.org.
AMBARI-14104. Recommendations not applied if cluster creation template does not contain all of the hostgroups. (Oliver Szabo via rnettleton)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fe690bfd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fe690bfd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fe690bfd

Branch: refs/heads/branch-dev-patch-upgrade
Commit: fe690bfdc71ecfb0d66285a13ab2b3babccb0b8d
Parents: edc8d14
Author: Bob Nettleton <rn...@hortonworks.com>
Authored: Mon Nov 30 16:51:05 2015 -0500
Committer: Bob Nettleton <rn...@hortonworks.com>
Committed: Mon Nov 30 16:51:05 2015 -0500

----------------------------------------------------------------------
 .../stackadvisor/StackAdvisorBlueprintProcessor.java  | 14 ++++++++------
 .../StackAdvisorBlueprintProcessorTest.java           |  2 ++
 2 files changed, 10 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/fe690bfd/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorBlueprintProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorBlueprintProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorBlueprintProcessor.java
index 0325885..d57c17d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorBlueprintProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorBlueprintProcessor.java
@@ -129,13 +129,15 @@ public class StackAdvisorBlueprintProcessor {
         Set<String> components = hgComponents.getValue();
 
         Set<String> hosts = bindingHostGroups.get(hgName);
-        for (String component : components) {
-          Set<String> componentHosts = componentHostsMap.get(component);
-          if (componentHosts == null) { // if was not initialized
-            componentHosts = new HashSet<String>();
-            componentHostsMap.put(component, componentHosts);
+        if (hosts != null) {
+          for (String component : components) {
+            Set<String> componentHosts = componentHostsMap.get(component);
+            if (componentHosts == null) { // if was not initialized
+              componentHosts = new HashSet<String>();
+              componentHostsMap.put(component, componentHosts);
+            }
+            componentHosts.addAll(hosts);
           }
-          componentHosts.addAll(hosts);
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/fe690bfd/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorBlueprintProcessorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorBlueprintProcessorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorBlueprintProcessorTest.java
index 2baa505..514e6ab 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorBlueprintProcessorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorBlueprintProcessorTest.java
@@ -161,6 +161,8 @@ public class StackAdvisorBlueprintProcessorTest {
   private Map<String, HostGroup> createHostGroupMap() {
     Map<String, HostGroup> hgMap = Maps.newHashMap();
     hgMap.put("hg1", hostGroup);
+    hgMap.put("hg2", hostGroup);
+    hgMap.put("hg3", hostGroup);
     return hgMap;
   }
 


[17/50] ambari git commit: AMBARI-13938. Select on large BIGINT in Ambari Hive View returns incorrect value. (Nitiraj Rathore via Jaimin)

Posted by nc...@apache.org.
AMBARI-13938. Select on large BIGINT in Ambari Hive View returns incorrect value. (Nitiraj Rathore via Jaimin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c1b52050
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c1b52050
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c1b52050

Branch: refs/heads/branch-dev-patch-upgrade
Commit: c1b520502a0fd3b946643be97b0e3a6a5e0701b2
Parents: b3cdc4e
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Sun Nov 29 21:44:20 2015 -0800
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Sun Nov 29 21:46:02 2015 -0800

----------------------------------------------------------------------
 .../resources/jobs/ResultsPaginationController.java | 16 +++++++++++++---
 1 file changed, 13 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c1b52050/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ResultsPaginationController.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ResultsPaginationController.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ResultsPaginationController.java
index 735e63d..84dec4d 100644
--- a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ResultsPaginationController.java
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/resources/jobs/ResultsPaginationController.java
@@ -168,7 +168,7 @@ public class ResultsPaginationController {
 
   private static class ResultsResponse {
     private ArrayList<ColumnDescription> schema;
-    private ArrayList<Object[]> rows;
+    private ArrayList<String[]> rows;
     private int readCount;
     private boolean hasNext;
     private long offset;
@@ -183,10 +183,20 @@ public class ResultsPaginationController {
     }
 
     public void setRows(ArrayList<Object[]> rows) {
-      this.rows = rows;
+      if( null == rows ){
+        this.rows = null;
+      }
+      this.rows = new ArrayList<String[]>(rows.size());
+      for(Object[] row : rows ){
+        String[] strs = new String[row.length];
+        for( int colNum = 0 ; colNum < row.length ; colNum++ ){
+          strs[colNum] = String.valueOf(row[colNum]);
+        }
+        this.rows.add(strs);
+      }
     }
 
-    public ArrayList<Object[]> getRows() {
+    public ArrayList<String[]> getRows() {
       return rows;
     }
 


[36/50] ambari git commit: AMBARI-13782. Fix typo in warn message when restarting DataNodes. (Akira Ajisaka via yusaku)

Posted by nc...@apache.org.
AMBARI-13782. Fix typo in warn message when restarting DataNodes. (Akira Ajisaka via yusaku)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/24094e17
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/24094e17
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/24094e17

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 24094e179dc2c2bba21634e1361cbbcbf053b145
Parents: 67672e6
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Mon Nov 30 17:12:23 2015 -0800
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Mon Nov 30 17:12:23 2015 -0800

----------------------------------------------------------------------
 ambari-web/app/messages.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/24094e17/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 502a7a2..f6d2486 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -2655,7 +2655,7 @@ Em.I18n.translations = {
   'rollingrestart.dialog.err.invalid.batchsize': 'Invalid restart batch size: {0}',
   'rollingrestart.dialog.err.invalid.waitTime': 'Invalid wait time between batches: {0}',
   'rollingrestart.dialog.err.invalid.toleratesize': 'Invalid failure toleration count: {0}',
-  'rollingrestart.dialog.warn.datanode.batch.size': 'Restarting more than one DataNode at a time is not recommended. Doing so can lead to data unavailability and/or possible loss of data being actively written to HFDS.',
+  'rollingrestart.dialog.warn.datanode.batch.size': 'Restarting more than one DataNode at a time is not recommended. Doing so can lead to data unavailability and/or possible loss of data being actively written to HDFS.',
   'rollingrestart.dialog.msg.serviceNotInMM':'Note: This will trigger alerts. To suppress alerts, turn on Maintenance Mode for {0} prior to triggering a rolling restart',
   'rollingrestart.dialog.msg.staleConfigsOnly': 'Only restart {0}s with stale configs',
   'rollingrestart.rest.context': 'Rolling Restart of {0}s - batch {1} of {2}',


[06/50] ambari git commit: AMBARI-14096. HostCleanup.py is removing system CentOS repositories (aonishuk)

Posted by nc...@apache.org.
AMBARI-14096. HostCleanup.py is removing system CentOS repositories (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/00cb7bad
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/00cb7bad
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/00cb7bad

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 00cb7bad7666fd86a0bec332928a42f58a656707
Parents: b470e3e
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Fri Nov 27 14:37:02 2015 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Fri Nov 27 14:37:02 2015 +0200

----------------------------------------------------------------------
 .../src/main/resources/custom_actions/scripts/check_host.py  | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/00cb7bad/ambari-server/src/main/resources/custom_actions/scripts/check_host.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/scripts/check_host.py b/ambari-server/src/main/resources/custom_actions/scripts/check_host.py
index 8b49725..01d79fe 100644
--- a/ambari-server/src/main/resources/custom_actions/scripts/check_host.py
+++ b/ambari-server/src/main/resources/custom_actions/scripts/check_host.py
@@ -77,12 +77,12 @@ THP_FILE_REDHAT = "/sys/kernel/mm/redhat_transparent_hugepage/enabled"
 THP_FILE_UBUNTU = "/sys/kernel/mm/transparent_hugepage/enabled"
 
 class CheckHost(Script):
-  # Packages that are used to find repos (then repos are used to find other packages)
+  # Package prefixes that are used to find repos (then repos are used to find other packages)
   PACKAGES = [
     "hadoop", "zookeeper", "webhcat", "oozie", "ambari", "*-manager-server-db",
     "*-manager-daemons", "mahout", "spark", "falcon", "hbase", "kafka", "knox",
-    "slider", "sqoop", "storm", "pig", "flume","hcatalog", "phoenix", "ranger",
-    "accumulo", "hive_*"
+    "slider", "sqoop", "storm", "flume","hcatalog", "phoenix", "ranger", "accumulo", "hive_*",
+    "pig_", "pig-", "pig." # there's a default 'pigz' package which we should avoid
   ]
   
 
@@ -106,7 +106,7 @@ class CheckHost(Script):
   
   # ignore repos from the list of repos to be cleaned
   IGNORE_REPOS = [
-    "HDP-UTILS", "AMBARI", "BASE"
+    "HDP-UTILS", "AMBARI", "BASE", "EXTRAS"
   ]
   
   def __init__(self):


[44/50] ambari git commit: AMBARI-14127. App Timeline Server fails to start after cluster creation on non-hdfs fs (aonishuk)

Posted by nc...@apache.org.
AMBARI-14127. App Timeline Server fails to start after cluster creation on non-hdfs fs (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1d01a85a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1d01a85a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1d01a85a

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 1d01a85adef1aa0d6f66e83203c45efbe9038ae5
Parents: 5cc99b2
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Dec 1 13:38:01 2015 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Tue Dec 1 13:38:35 2015 +0200

----------------------------------------------------------------------
 .../before-START/files/fast-hdfs-resource.jar   | Bin 19285282 -> 19285353 bytes
 .../ambari/fast_hdfs_resource/Resource.java     |   2 +-
 2 files changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1d01a85a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/files/fast-hdfs-resource.jar
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/files/fast-hdfs-resource.jar b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/files/fast-hdfs-resource.jar
index defde28..98a7f66 100644
Binary files a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/files/fast-hdfs-resource.jar and b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/files/fast-hdfs-resource.jar differ

http://git-wip-us.apache.org/repos/asf/ambari/blob/1d01a85a/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Resource.java
----------------------------------------------------------------------
diff --git a/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Resource.java b/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Resource.java
index 21750e1..3d2f182 100644
--- a/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Resource.java
+++ b/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Resource.java
@@ -214,7 +214,7 @@ public class Resource {
       FileSystem dfs, Path pathHadoop) throws IOException {
 
     if (resource.getMode() != null) {
-      FsPermission permission = new FsPermission(resource.getMode());
+      FsPermission permission = new FsPermission(Short.valueOf(resource.getMode()));
       dfs.setPermission(pathHadoop, permission);
 
       // Recursive


[35/50] ambari git commit: Updated contributor list. (yusaku)

Posted by nc...@apache.org.
Updated contributor list. (yusaku)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/67672e63
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/67672e63
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/67672e63

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 67672e6331f30b7401fc59a1b716c0f68ad071b5
Parents: ca53dfd
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Mon Nov 30 17:01:00 2015 -0800
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Mon Nov 30 17:01:00 2015 -0800

----------------------------------------------------------------------
 docs/pom.xml | 8 ++++++++
 1 file changed, 8 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/67672e63/docs/pom.xml
----------------------------------------------------------------------
diff --git a/docs/pom.xml b/docs/pom.xml
index ada80b3..94712b5 100644
--- a/docs/pom.xml
+++ b/docs/pom.xml
@@ -833,6 +833,10 @@
           <organization>Hortonworks</organization>
         </contributor>
         <contributor>
+          <name>Pallav Kulshreshtha</name>
+          <organization>Hortonworks</organization>
+        </contributor>
+        <contributor>
           <name>Olivier Lamy</name>
           <organization></organization>
         </contributor>
@@ -973,6 +977,10 @@
           <organization>Hortonworks</organization>
         </contributor>
         <contributor>
+          <name>Aravindan Vijayan</name>
+          <organization>Hortonworks</organization>
+        </contributor>
+        <contributor>
           <name>Ivan Wang</name>
           <organization>Pivotal</organization>
         </contributor>


[37/50] ambari git commit: Revert "AMBARI-14095. Upgrade: second click shows diff results"

Posted by nc...@apache.org.
Revert "AMBARI-14095. Upgrade: second click shows diff results"

This reverts commit b470e3e7d9cd78b747f6cc08e100e9a2b8b709fc.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/278d4c9e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/278d4c9e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/278d4c9e

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 278d4c9ebe854ef66a323fef47f663eda809dca1
Parents: 24094e1
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Mon Nov 30 17:43:21 2015 -0800
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Mon Nov 30 17:43:21 2015 -0800

----------------------------------------------------------------------
 ambari-web/app/assets/test/tests.js             |   1 -
 .../app/controllers/main/admin/kerberos.js      |   8 +-
 .../main/admin/stack_and_upgrade_controller.js  |  42 +--
 .../modal_popups/cluster_check_dialog.hbs       |  24 +-
 .../common/modal_popups/cluster_check_popup.js  | 107 ++++++--
 .../admin/stack_and_upgrade_controller_test.js  |   2 +-
 .../modal_popups/cluster_check_popup_test.js    | 271 -------------------
 7 files changed, 103 insertions(+), 352 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/278d4c9e/ambari-web/app/assets/test/tests.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/test/tests.js b/ambari-web/app/assets/test/tests.js
index 23460b7..f8d59f1 100644
--- a/ambari-web/app/assets/test/tests.js
+++ b/ambari-web/app/assets/test/tests.js
@@ -202,7 +202,6 @@ var files = [
   'test/views/common/widget/graph_widget_view_test',
   'test/views/common/widget/number_widget_view_test',
   'test/views/common/widget/gauge_widget_view_test',
-  'test/views/common/modal_popups/cluster_check_popup_test',
   'test/views/common/modal_popups/hosts_table_list_popup_test',
   'test/views/common/modal_popups/dependent_configs_list_popup_test',
   'test/views/main/admin_test',

http://git-wip-us.apache.org/repos/asf/ambari/blob/278d4c9e/ambari-web/app/controllers/main/admin/kerberos.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/kerberos.js b/ambari-web/app/controllers/main/admin/kerberos.js
index ac2a332..57ee8c1 100644
--- a/ambari-web/app/controllers/main/admin/kerberos.js
+++ b/ambari-web/app/controllers/main/admin/kerberos.js
@@ -221,16 +221,10 @@ App.MainAdminKerberosController = App.KerberosWizardStep4Controller.extend({
   runSecurityCheckSuccess: function (data, opt, params) {
     //TODO correct check
     if (data.items.someProperty('UpgradeChecks.status', "FAIL")) {
-      var hasFails = data.items.someProperty('UpgradeChecks.status', 'FAIL');
       var header = Em.I18n.t('popup.clusterCheck.Security.header').format(params.label);
       var title = Em.I18n.t('popup.clusterCheck.Security.title');
       var alert = Em.I18n.t('popup.clusterCheck.Security.alert');
-      App.showClusterCheckPopup(data, {
-        header: header,
-        failTitle: title,
-        failAlert: alert,
-        noCallbackCondition: hasFails
-      });
+      App.showClusterCheckPopup(data, header, title, alert);
     } else {
       this.startKerberosWizard();
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/278d4c9e/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
index 7094d67..f331540 100644
--- a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
+++ b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
@@ -720,10 +720,8 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
             configsMergeWarning = data.items.findProperty('UpgradeChecks.id', "CONFIG_MERGE"),
             configs = [];
           if (configsMergeWarning && Em.get(configsMergeWarning, 'UpgradeChecks.status') === 'WARNING') {
-            var popupData = {
-                items: data.items.rejectProperty('UpgradeChecks.id', 'CONFIG_MERGE')
-              },
-              configsMergeCheckData = Em.get(configsMergeWarning, 'UpgradeChecks.failed_detail');
+            data.items = data.items.rejectProperty('UpgradeChecks.id', 'CONFIG_MERGE');
+            var configsMergeCheckData = Em.get(configsMergeWarning, 'UpgradeChecks.failed_detail');
             if (configsMergeCheckData) {
               configs = configsMergeCheckData.map(function (item) {
                 var isDeprecated = Em.isNone(item.new_stack_value),
@@ -740,21 +738,12 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
               });
             }
           }
-          App.showClusterCheckPopup(popupData, {
-            header: header,
-            failTitle: failTitle,
-            failAlert: failAlert,
-            warningTitle: warningTitle,
-            warningAlert: warningAlert,
-            primary: Em.I18n.t('admin.stackVersions.version.upgrade.upgradeOptions.preCheck.rerun'),
-            secondary: Em.I18n.t('common.cancel'),
-            callback: function () {
-              self.runPreUpgradeCheckOnly.call(self, {
-                value: version.get('repositoryVersion'),
-                label: version.get('displayName'),
-                type: event.context.get('type')
-              });
-            }
+          App.showPreUpgradeCheckPopup(data, header, failTitle, failAlert, warningTitle, warningAlert, function () {
+            self.runPreUpgradeCheckOnly.call(self, {
+              value: version.get('repositoryVersion'),
+              label: version.get('displayName'),
+              type: event.context.get('type')
+            });
           }, configs, version.get('displayName'));
         }
       }),
@@ -969,8 +958,7 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
     var self = this;
     if (data.items.someProperty('UpgradeChecks.status', 'FAIL') || data.items.someProperty('UpgradeChecks.status', 'WARNING')) {
       this.set('requestInProgress', false);
-      var hasFails = data.items.someProperty('UpgradeChecks.status', 'FAIL'),
-        header = Em.I18n.t('popup.clusterCheck.Upgrade.header').format(params.label),
+      var header = Em.I18n.t('popup.clusterCheck.Upgrade.header').format(params.label),
         failTitle = Em.I18n.t('popup.clusterCheck.Upgrade.fail.title'),
         failAlert = new Em.Handlebars.SafeString(Em.I18n.t('popup.clusterCheck.Upgrade.fail.alert')),
         warningTitle = Em.I18n.t('popup.clusterCheck.Upgrade.warning.title'),
@@ -996,16 +984,8 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
           });
         }
       }
-      App.showClusterCheckPopup(data, {
-        header: header,
-        failTitle: failTitle,
-        failAlert: failAlert,
-        warningTitle: warningTitle,
-        warningAlert: warningAlert,
-        noCallbackCondition: hasFails,
-        callback: function () {
-          self.upgrade(params);
-        }
+      App.showClusterCheckPopup(data, header, failTitle, failAlert, warningTitle, warningAlert, function () {
+        self.upgrade(params);
       }, configs, params.label);
     } else {
       this.upgrade(params);

http://git-wip-us.apache.org/repos/asf/ambari/blob/278d4c9e/ambari-web/app/templates/common/modal_popups/cluster_check_dialog.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/modal_popups/cluster_check_dialog.hbs b/ambari-web/app/templates/common/modal_popups/cluster_check_dialog.hbs
index 8fd2746..8767d70 100644
--- a/ambari-web/app/templates/common/modal_popups/cluster_check_dialog.hbs
+++ b/ambari-web/app/templates/common/modal_popups/cluster_check_dialog.hbs
@@ -20,14 +20,10 @@
     <i class="icon-ok"></i>&nbsp;<span>{{t admin.stackVersions.version.upgrade.upgradeOptions.preCheck.allPassed.msg}}</span>
   {{/if}}
   {{#if view.fails.length}}
-    {{#if view.failTitle}}
-      <h4>{{view.failTitle}}</h4>
-    {{/if}}
-    {{#if view.failAlert}}
-      <div class="alert alert-warning">
-        {{view.failAlert}}
-      </div>
-    {{/if}}
+    <h4>{{view.failTitle}}</h4>
+    <div class="alert alert-warning">
+      {{view.failAlert}}
+    </div>
     <div class="limited-height-2">
       {{#each item in view.fails}}
         <i class="icon-remove"></i>&nbsp;<span>{{item.UpgradeChecks.check}}</span>
@@ -36,14 +32,10 @@
     </div>
   {{/if}}
   {{#if view.warnings.length}}
-    {{#if view.warningTitle}}
-      <h4>{{view.warningTitle}}</h4>
-    {{/if}}
-    {{#if view.warningAlert}}
-      <div class="alert alert-warning">
-        {{view.warningAlert}}
-      </div>
-    {{/if}}
+    <h4>{{view.warningTitle}}</h4>
+    <div class="alert alert-warning">
+      {{view.warningAlert}}
+    </div>
     <div class="limited-height-2">
       {{#each item in view.warnings}}
         <i class="icon-warning-sign"></i>&nbsp;<span>{{item.UpgradeChecks.check}}</span>

http://git-wip-us.apache.org/repos/asf/ambari/blob/278d4c9e/ambari-web/app/views/common/modal_popups/cluster_check_popup.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/modal_popups/cluster_check_popup.js b/ambari-web/app/views/common/modal_popups/cluster_check_popup.js
index 54f20ba..681c807 100644
--- a/ambari-web/app/views/common/modal_popups/cluster_check_popup.js
+++ b/ambari-web/app/views/common/modal_popups/cluster_check_popup.js
@@ -22,33 +22,30 @@ var App = require('app');
  * popup to display requirements that are not met
  * for current action
  * @param data
- * @param popup
+ * @param header
+ * @param failTitle
+ * @param failAlert
+ * @param warningTitle
+ * @param warningAlert
+ * @param callback
  * @param configs
  * @param upgradeVersion
  * @returns {*|void}
  */
-App.showClusterCheckPopup = function (data, popup, configs, upgradeVersion) {
+App.showClusterCheckPopup = function (data, header, failTitle, failAlert, warningTitle, warningAlert, callback, configs, upgradeVersion) {
   var fails = data.items.filterProperty('UpgradeChecks.status', 'FAIL'),
     warnings = data.items.filterProperty('UpgradeChecks.status', 'WARNING'),
     hasConfigsMergeConflicts = !!(configs && configs.length),
-    primary,
-    secondary,
-    popupBody;
-  popup = popup || {};
-  primary = Em.isNone(popup.primary) ?
-    (fails.length ? Em.I18n.t('common.dismiss') : Em.I18n.t('common.proceedAnyway')) : popup.primary;
-  secondary = Em.isNone(popup.secondary) ? (fails.length ? false : Em.I18n.t('common.cancel')) : popup.secondary;
-  popupBody = {
-    failTitle: popup.failTitle,
-    failAlert: popup.failAlert,
-    warningTitle: popup.warningTitle,
-    warningAlert: popup.warningAlert,
-    templateName: require('templates/common/modal_popups/cluster_check_dialog'),
-    fails: fails,
-    warnings: warnings,
-    hasConfigsMergeConflicts: hasConfigsMergeConflicts,
-    isAllPassed: !fails.length && !warnings.length && !hasConfigsMergeConflicts
-  };
+    popupBody = {
+      failTitle: failTitle,
+      failAlert: failAlert,
+      warningTitle: warningTitle,
+      warningAlert: warningAlert,
+      templateName: require('templates/common/modal_popups/cluster_check_dialog'),
+      fails: fails,
+      warnings: warnings,
+      hasConfigsMergeConflicts: hasConfigsMergeConflicts
+    };
   if (hasConfigsMergeConflicts) {
     popupBody.configsMergeTable = Em.View.extend({
       templateName: require('templates/main/admin/stack_upgrade/upgrade_configs_merge_table'),
@@ -61,15 +58,75 @@ App.showClusterCheckPopup = function (data, popup, configs, upgradeVersion) {
     });
   }
   return App.ModalPopup.show({
-    primary: primary,
-    secondary: secondary,
-    header: popup.header,
+    primary: fails.length ? Em.I18n.t('common.dismiss') : Em.I18n.t('common.proceedAnyway'),
+    secondary: fails.length ? false : Em.I18n.t('common.cancel'),
+    header: header,
+    classNames: ['cluster-check-popup'],
+    bodyClass: Em.View.extend(popupBody),
+    onPrimary: function () {
+      if (!fails.length && callback) {
+        callback();
+      }
+      this._super();
+    },
+    didInsertElement: function () {
+      this._super();
+      this.fitHeight();
+    }
+  });
+};
+
+
+/**
+ * popup to display requirements that are not met
+ * for current action
+ * @param data
+ * @param header
+ * @param failTitle
+ * @param failAlert
+ * @param warningTitle
+ * @param warningAlert
+ * @param callback
+ * @param configs
+ * @param upgradeVersion
+ * @returns {*|void}
+ */
+App.showPreUpgradeCheckPopup = function (data, header, failTitle, failAlert, warningTitle, warningAlert, callback, configs, upgradeVersion) {
+  var fails = data.items.filterProperty('UpgradeChecks.status', 'FAIL'),
+    warnings = data.items.filterProperty('UpgradeChecks.status', 'WARNING'),
+    hasConfigsMergeConflicts = !!(configs && configs.length),
+    popupBody = {
+      failTitle: failTitle,
+      failAlert: failAlert,
+      warningTitle: warningTitle,
+      warningAlert: warningAlert,
+      templateName: require('templates/common/modal_popups/cluster_check_dialog'),
+      fails: fails,
+      warnings: warnings,
+      hasConfigsMergeConflicts: hasConfigsMergeConflicts,
+      isAllPassed: !fails.length && !warnings.length && !hasConfigsMergeConflicts.length
+    };
+  if (hasConfigsMergeConflicts) {
+    popupBody.configsMergeTable = Em.View.extend({
+      templateName: require('templates/main/admin/stack_upgrade/upgrade_configs_merge_table'),
+      configs: configs,
+      didInsertElement: function () {
+        App.tooltip($('.recommended-value'), {
+          title: upgradeVersion
+        });
+      }
+    });
+  }
+  return App.ModalPopup.show({
+    primary: Em.I18n.t('admin.stackVersions.version.upgrade.upgradeOptions.preCheck.rerun'),
+    secondary: Em.I18n.t('common.cancel'),
+    header: header,
     classNames: ['cluster-check-popup'],
     bodyClass: Em.View.extend(popupBody),
     onPrimary: function () {
       this._super();
-      if (!popup.noCallbackCondition && popup.callback) {
-        popup.callback();
+      if (callback) {
+        callback();
       }
     },
     didInsertElement: function () {

http://git-wip-us.apache.org/repos/asf/ambari/blob/278d4c9e/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js b/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
index b9c7cb7..65739cd 100644
--- a/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
+++ b/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
@@ -454,7 +454,7 @@ describe('App.MainAdminStackAndUpgradeController', function() {
         expect(controller.upgrade.callCount).to.equal(item.upgradeCalledCount);
         expect(App.showClusterCheckPopup.callCount).to.equal(item.showClusterCheckPopupCalledCount);
         if (item.check.id == 'CONFIG_MERGE') {
-          expect(App.showClusterCheckPopup.firstCall.args[2]).to.eql(item.configs);
+          expect(App.showClusterCheckPopup.firstCall.args[7]).to.eql(item.configs);
         }
       });
     });

http://git-wip-us.apache.org/repos/asf/ambari/blob/278d4c9e/ambari-web/test/views/common/modal_popups/cluster_check_popup_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/common/modal_popups/cluster_check_popup_test.js b/ambari-web/test/views/common/modal_popups/cluster_check_popup_test.js
deleted file mode 100644
index 7f02730..0000000
--- a/ambari-web/test/views/common/modal_popups/cluster_check_popup_test.js
+++ /dev/null
@@ -1,271 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-var App = require('app');
-require('views/common/modal_popups/cluster_check_popup');
-
-describe('App.showClusterCheckPopup', function () {
-
-  var isCallbackExecuted,
-    callback = function () {
-      isCallbackExecuted = true;
-    },
-    cases = [
-      {
-        inputData: {
-          data: {
-            items: [
-              {
-                UpgradeChecks: {
-                  id: 'p0',
-                  status: 'PASS'
-                }
-              },
-              {
-                UpgradeChecks: {
-                  id: 'p1',
-                  status: 'PASS'
-                }
-              }
-            ]
-          }
-        },
-        result: {
-          primary: Em.I18n.t('common.proceedAnyway'),
-          secondary: Em.I18n.t('common.cancel'),
-          header: '&nbsp;'
-        },
-        bodyResult: {
-          failTitle: undefined,
-          failAlert: undefined,
-          warningTitle: undefined,
-          warningAlert: undefined,
-          fails: [],
-          warnings: [],
-          hasConfigsMergeConflicts: false,
-          isAllPassed: true
-        },
-        isCallbackExecuted: false,
-        title: 'no fails, no warnings, no popup customization'
-      },
-      {
-        inputData: {
-          data: {
-            items: [
-              {
-                UpgradeChecks: {
-                  id: 'w0',
-                  status: 'WARNING'
-                }
-              },
-              {
-                UpgradeChecks: {
-                  id: 'w1',
-                  status: 'WARNING'
-                }
-              }
-            ]
-          },
-          popup: {
-            header: 'checks',
-            failTitle: 'fail',
-            failAlert: 'something has failed',
-            warningTitle: 'warning',
-            warningAlert: 'something is not good',
-            callback: callback
-          }
-        },
-        result: {
-          primary: Em.I18n.t('common.proceedAnyway'),
-          secondary: Em.I18n.t('common.cancel'),
-          header: 'checks'
-        },
-        bodyResult: {
-          failTitle: 'fail',
-          failAlert: 'something has failed',
-          warningTitle: 'warning',
-          warningAlert: 'something is not good',
-          fails: [],
-          warnings: [
-            {
-              UpgradeChecks: {
-                id: 'w0',
-                status: 'WARNING'
-              }
-            },
-            {
-              UpgradeChecks: {
-                id: 'w1',
-                status: 'WARNING'
-              }
-            }
-          ],
-          hasConfigsMergeConflicts: false,
-          isAllPassed: false
-        },
-        isCallbackExecuted: true,
-        title: 'no fails, default buttons, callback executed'
-      },
-      {
-        inputData: {
-          data: {
-            items: [
-              {
-                UpgradeChecks: {
-                  id: 'f0',
-                  status: 'FAIL'
-                }
-              },
-              {
-                UpgradeChecks: {
-                  id: 'f1',
-                  status: 'FAIL'
-                }
-              }
-            ]
-          },
-          popup: {
-            callback: callback,
-            noCallbackCondition: true
-          }
-        },
-        result: {
-          primary: Em.I18n.t('common.dismiss'),
-          secondary: false,
-          header: '&nbsp;'
-        },
-        bodyResult: {
-          failTitle: undefined,
-          failAlert: undefined,
-          warningTitle: undefined,
-          warningAlert: undefined,
-          fails: [
-            {
-              UpgradeChecks: {
-                id: 'f0',
-                status: 'FAIL'
-              }
-            },
-            {
-              UpgradeChecks: {
-                id: 'f1',
-                status: 'FAIL'
-              }
-            }
-          ],
-          warnings: [],
-          hasConfigsMergeConflicts: false,
-          isAllPassed: false
-        },
-        isCallbackExecuted: false,
-        title: 'fails detected, default buttons, callback not executed'
-      },
-      {
-        inputData: {
-          data: {
-            items: [
-              {
-                UpgradeChecks: {
-                  id: 'p0',
-                  status: 'PASS'
-                }
-              },
-              {
-                UpgradeChecks: {
-                  id: 'p1',
-                  status: 'PASS'
-                }
-              }
-            ]
-          },
-          popup: {
-            primary: 'ok',
-            secondary: 'cancel'
-          },
-          configs: [
-            {
-              name: 'c0'
-            },
-            {
-              name: 'c1'
-            }
-          ],
-          upgradeVersion: 'HDP-2.3.0.0'
-        },
-        result: {
-          primary: 'ok',
-          secondary: 'cancel',
-          header: '&nbsp;'
-        },
-        bodyResult: {
-          failTitle: undefined,
-          failAlert: undefined,
-          warningTitle: undefined,
-          warningAlert: undefined,
-          fails: [],
-          warnings: [],
-          hasConfigsMergeConflicts: true,
-          isAllPassed: false
-        },
-        configsResult: [
-          {
-            name: 'c0'
-          },
-          {
-            name: 'c1'
-          }
-        ],
-        isCallbackExecuted: false,
-        title: 'configs merge conflicts detected, custom buttons'
-      }
-    ];
-
-  beforeEach(function () {
-    isCallbackExecuted = false;
-    sinon.stub(App, 'tooltip', Em.K);
-  });
-
-  afterEach(function () {
-    App.tooltip.restore();
-  });
-
-  cases.forEach(function (item) {
-    it(item.title, function () {
-      var popup = App.showClusterCheckPopup(item.inputData.data, item.inputData.popup, item.inputData.configs, item.inputData.upgradeVersion),
-        popupBody = popup.bodyClass.create();
-      popup.onPrimary();
-      Em.keys(item.result).forEach(function (key) {
-        expect(popup[key]).to.equal(item.result[key]);
-      });
-      Em.keys(item.bodyResult).forEach(function (key) {
-        expect(popupBody[key]).to.eql(item.bodyResult[key]);
-      });
-      expect(isCallbackExecuted).to.equal(item.isCallbackExecuted);
-      if (item.bodyResult.hasConfigsMergeConflicts) {
-        var configsMergeTable = popupBody.configsMergeTable.create();
-        configsMergeTable.didInsertElement();
-        expect(configsMergeTable.configs).to.eql(item.configsResult);
-        expect(App.tooltip.calledOnce).to.be.true;
-        expect(App.tooltip.firstCall.args[1].title).to.equal(item.inputData.upgradeVersion);
-      } else {
-        expect(App.tooltip.calledOnce).to.be.false;
-      }
-    });
-  });
-
-});


[33/50] ambari git commit: Revert "AMBARI-14058. "Application Tracking URL" in Tez View broken due to RM HA changes in Ambari views framework. (Dipayan Bhowmick)"

Posted by nc...@apache.org.
Revert "AMBARI-14058. "Application Tracking URL" in Tez View broken due to RM HA changes in Ambari views framework. (Dipayan Bhowmick)"

This reverts commit 89c6f26286540651ee46bcb50f618014863c5e79.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1f1c661a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1f1c661a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1f1c661a

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 1f1c661a100f4136d24c9526a18c3d920a5bff8f
Parents: 006f0fe
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Mon Nov 30 16:11:55 2015 -0800
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Mon Nov 30 16:11:55 2015 -0800

----------------------------------------------------------------------
 .../ambari/view/tez/rest/BaseProxyResource.java | 21 +-------------------
 .../resources/ui/scripts/init-ambari-view.js    |  3 ---
 2 files changed, 1 insertion(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1f1c661a/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/rest/BaseProxyResource.java
----------------------------------------------------------------------
diff --git a/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/rest/BaseProxyResource.java b/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/rest/BaseProxyResource.java
index 5f8fb91..3670a40 100644
--- a/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/rest/BaseProxyResource.java
+++ b/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/rest/BaseProxyResource.java
@@ -19,7 +19,6 @@
 package org.apache.ambari.view.tez.rest;
 
 import com.google.inject.Inject;
-import org.apache.ambari.view.tez.exceptions.ProxyException;
 import org.apache.ambari.view.tez.utils.ProxyHelper;
 import org.json.simple.JSONObject;
 import org.json.simple.JSONValue;
@@ -28,13 +27,7 @@ import javax.ws.rs.GET;
 import javax.ws.rs.Path;
 import javax.ws.rs.PathParam;
 import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.UriInfo;
-import java.net.URI;
-import java.net.URISyntaxException;
+import javax.ws.rs.core.*;
 import java.util.HashMap;
 
 /**
@@ -57,18 +50,6 @@ public abstract class BaseProxyResource {
     String response = proxyHelper.getResponse(url, new HashMap<String, String>());
 
     JSONObject jsonObject = (JSONObject) JSONValue.parse(response);
-
-    // If the endpoint returns some other format apart from JSON,
-    // we will only redirect the request. This is required because UI may call
-    // the proxy endpoint to directly point to any URL of RM/ATS.
-    if (jsonObject == null) {
-      try {
-        return Response.temporaryRedirect(new URI(url)).build();
-      } catch (URISyntaxException e) {
-        throw new ProxyException("Failed to set the redirection url to : " + url + ".Internal Error.",
-          Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e.getMessage());
-      }
-    }
     return Response.ok(jsonObject).type(MediaType.APPLICATION_JSON).build();
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/1f1c661a/contrib/views/tez/src/main/resources/ui/scripts/init-ambari-view.js
----------------------------------------------------------------------
diff --git a/contrib/views/tez/src/main/resources/ui/scripts/init-ambari-view.js b/contrib/views/tez/src/main/resources/ui/scripts/init-ambari-view.js
index c443726..5152fb9 100644
--- a/contrib/views/tez/src/main/resources/ui/scripts/init-ambari-view.js
+++ b/contrib/views/tez/src/main/resources/ui/scripts/init-ambari-view.js
@@ -164,9 +164,6 @@ function setConfigs() {
       aminfo: '%@rmproxy/proxy/__app_id__/ws/v1/tez'.fmt(resourcesPrefix),
       aminfoV2: '%@rmproxy/proxy/__app_id__/ws/v2/tez'.fmt(resourcesPrefix),
       cluster: '%@rmproxy/ws/v1/cluster'.fmt(resourcesPrefix)
-    },
-    otherNamespace: {
-      cluster: '%@rmproxy/cluster'.fmt(resourcesPrefix)
     }
   });
 


[07/50] ambari git commit: AMBARI-14097. Should option "Enable RM HA" be available if no RM installed on the cluster (onechiporenko)

Posted by nc...@apache.org.
AMBARI-14097. Should option "Enable RM HA" be available if no RM installed on the cluster (onechiporenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/667e1e8b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/667e1e8b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/667e1e8b

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 667e1e8be2b84903248cad302ebffe39795183f7
Parents: 00cb7ba
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Fri Nov 27 14:37:05 2015 +0200
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Fri Nov 27 14:39:21 2015 +0200

----------------------------------------------------------------------
 ambari-web/app/models/host_component.js         | 19 ++++-
 ambari-web/test/models/host_component_test.js   | 25 +++++++
 ambari-web/test/views/main/service/item_test.js | 77 ++++++++++++--------
 3 files changed, 86 insertions(+), 35 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/667e1e8b/ambari-web/app/models/host_component.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/host_component.js b/ambari-web/app/models/host_component.js
index da87071..db750e0 100644
--- a/ambari-web/app/models/host_component.js
+++ b/ambari-web/app/models/host_component.js
@@ -52,6 +52,14 @@ App.HostComponent = DS.Model.extend({
   isRunning: Em.computed.existsIn('workStatus', ['STARTED', 'STARTING']),
 
   /**
+   * Determines if component is not installed
+   * Based on <code>workStatus</code>
+   *
+   * @type {boolean}
+   */
+  isNotInstalled: Em.computed.existsIn('workStatus', ['INIT', 'INSTALL_FAILED']),
+
+  /**
    * Formatted <code>componentName</code>
    * @returns {String}
    */
@@ -245,6 +253,9 @@ App.HostComponentStatus = {
 
 App.HostComponentActionMap = {
   getMap: function(ctx) {
+    var NN = ctx.get('controller.content.hostComponents').findProperty('componentName', 'NAMENODE');
+    var RM = ctx.get('controller.content.hostComponents').findProperty('componentName', 'RESOURCEMANAGER');
+    var RA = ctx.get('controller.content.hostComponents').findProperty('componentName', 'RANGER_ADMIN');
     return {
       RESTART_ALL: {
         action: 'restartAllHostComponents',
@@ -292,21 +303,21 @@ App.HostComponentActionMap = {
         label: App.get('isHaEnabled') ? Em.I18n.t('admin.highAvailability.button.disable') : Em.I18n.t('admin.highAvailability.button.enable'),
         cssClass: App.get('isHaEnabled') ? 'icon-arrow-down' : 'icon-arrow-up',
         isHidden: App.get('isHaEnabled'),
-        disabled: App.get('isSingleNode')
+        disabled: App.get('isSingleNode') || !NN || NN.get('isNotInstalled')
       },
       TOGGLE_RM_HA: {
         action: 'enableRMHighAvailability',
         label: Em.I18n.t('admin.rm_highAvailability.button.enable'),
         cssClass: 'icon-arrow-up',
         isHidden: App.get('isRMHaEnabled'),
-        disabled: App.get('isSingleNode')
+        disabled: App.get('isSingleNode') || !RM || RM.get('isNotInstalled')
       },
       TOGGLE_RA_HA: {
         action: 'enableRAHighAvailability',
         label: Em.I18n.t('admin.ra_highAvailability.button.enable'),
         cssClass: 'icon-arrow-up',
         isHidden: App.get('isRAHaEnabled'),
-        disabled: App.get('isSingleNode')
+        disabled: App.get('isSingleNode') || !RA || RA.get('isNotInstalled')
       },
       MOVE_COMPONENT: {
         action: 'reassignMaster',
@@ -353,6 +364,6 @@ App.HostComponentActionMap = {
         isHidden: false,
         disabled: false
       }
-    }
+    };
   }
 };
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/667e1e8b/ambari-web/test/models/host_component_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/models/host_component_test.js b/ambari-web/test/models/host_component_test.js
index 7b4cf74..6ba6fb0 100644
--- a/ambari-web/test/models/host_component_test.js
+++ b/ambari-web/test/models/host_component_test.js
@@ -314,4 +314,29 @@ describe('App.HostComponent', function() {
       });
     });
   });
+
+  describe('#isNotInstalled', function () {
+
+    Em.A([
+      {
+        p: {workStatus: 'INIT'},
+        e: true
+      },
+      {
+        p: {workStatus: 'INSTALL_FAILED'},
+        e: true
+      },
+      {
+        p: {workStatus: 'STARTED'},
+        e: false
+      }
+    ]).forEach(function (test, index) {
+      it('#' + (index + 1), function() {
+        hc.setProperties(test.p);
+        expect(hc.get('isNotInstalled')).to.equal(test.e);
+      });
+    });
+
+  });
+
 });
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/667e1e8b/ambari-web/test/views/main/service/item_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/service/item_test.js b/ambari-web/test/views/main/service/item_test.js
index 155e094..560760a 100644
--- a/ambari-web/test/views/main/service/item_test.js
+++ b/ambari-web/test/views/main/service/item_test.js
@@ -144,6 +144,7 @@ describe('App.MainServiceItemView', function () {
           hostComponents: [
             Em.Object.create({
               componentName: 'NAMENODE',
+              isNotInstalled: true,
               isMaster: true,
               isSlave: false
             }),
@@ -220,7 +221,8 @@ describe('App.MainServiceItemView', function () {
             Em.Object.create({
               componentName: 'RESOURCEMANAGER',
               isMaster: true,
-              isSlave: false
+              isSlave: false,
+              isNotInstalled: false
             })
           ],
           result: [
@@ -420,7 +422,7 @@ describe('App.MainServiceItemView', function () {
       sinon.stub(App, 'get', function (k) {
         switch (k) {
           case 'isSingleNode':
-            return (view.get('controller.content.serviceName') == 'HDFS');
+            return view.get('controller.content.serviceName') == 'HDFS';
           case 'supports.autoRollbackHA':
           case 'isRMHaEnabled':
           case 'isHaEnabled':
@@ -504,39 +506,52 @@ describe('App.MainServiceItemView', function () {
 
     testCases.forEach(function (testCase) {
 
-      it('Maintenance for ' + testCase.serviceName + ' service', function () {
-        view.reopen({
-          controller: Em.Object.create({
-            content: Em.Object.create({
-              hostComponents: testCase.hostComponents,
-              slaveComponents: testCase.slaveComponents,
-              clientComponents: testCase.clientComponents,
-              serviceName: testCase.serviceName,
-              displayName: testCase.displayName,
-              serviceTypes: testCase.serviceTypes,
-              passiveState: 'OFF'
+      describe('Maintenance for ' + testCase.serviceName + ' service', function () {
+
+        beforeEach(function () {
+          view.reopen({
+            controller: Em.Object.create({
+              content: Em.Object.create({
+                hostComponents: testCase.hostComponents,
+                slaveComponents: testCase.slaveComponents,
+                clientComponents: testCase.clientComponents,
+                serviceName: testCase.serviceName,
+                displayName: testCase.displayName,
+                serviceTypes: testCase.serviceTypes,
+                passiveState: 'OFF'
+              }),
+              isSeveralClients: false,
+              clientComponents: [],
+              isStopDisabled: false
             }),
-            isSeveralClients: false,
-            clientComponents: [],
-            isStopDisabled: false
-          }),
-          mastersExcludedCommands: mastersExcludedCommands,
-          hasConfigTab: hasConfigTab
+            mastersExcludedCommands: mastersExcludedCommands,
+            hasConfigTab: hasConfigTab
+          });
+          if (testCase.controller) {
+            testCase.controller.forEach(function (item) {
+              Object.keys(item).forEach(function (key) {
+                view.set('controller.' + key, item[key]);
+              });
+            });
+          }
+          view.observeMaintenanceOnce();
         });
-        if (testCase.controller) {
-          testCase.controller.forEach(function (item) {
-            Object.keys(item).forEach(function (key) {
-              view.set('controller.' + key, item[key]);
+        testCase.result.forEach(function (option, index) {
+          Object.keys(option).forEach(function (key) {
+            it(option.action + ', key - ' + key, function () {
+              var r = view.get('maintenance')[index];
+              expect(Em.get(option, key)).to.eql(Em.get(r, key));
             });
           });
-        }
-        view.observeMaintenanceOnce();
-        expect(view.get('maintenance')).to.eql(testCase.result);
-        var oldMaintenance = JSON.parse(JSON.stringify(view.get('maintenance')));
-        view.set('controller.content.passiveState', 'ON');
-        view.observeMaintenanceOnce();
-        expect(view.get('maintenance')).to.not.eql(oldMaintenance);
-        expect(view.get('isMaintenanceSet')).to.be.true;
+        });
+
+        it('maintenance is updated', function () {
+          var oldMaintenance = JSON.parse(JSON.stringify(view.get('maintenance')));
+          view.set('controller.content.passiveState', 'ON');
+          view.observeMaintenanceOnce();
+          expect(view.get('maintenance')).to.not.eql(oldMaintenance);
+          expect(view.get('isMaintenanceSet')).to.be.true;
+        });
       });
 
     });


[24/50] ambari git commit: AMBARI-14117. Build failure in Hive view (Pallav Kulshreshtha via srimanth)

Posted by nc...@apache.org.
AMBARI-14117. Build failure in Hive view (Pallav Kulshreshtha via srimanth)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c838f314
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c838f314
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c838f314

Branch: refs/heads/branch-dev-patch-upgrade
Commit: c838f314dde60700d89562b2dac66d29445623ab
Parents: bbe5e76
Author: Srimanth Gunturi <sg...@hortonworks.com>
Authored: Mon Nov 30 10:13:32 2015 -0800
Committer: Srimanth Gunturi <sg...@hortonworks.com>
Committed: Mon Nov 30 10:14:10 2015 -0800

----------------------------------------------------------------------
 contrib/views/hive/src/main/resources/ui/hive-web/bower.json | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c838f314/contrib/views/hive/src/main/resources/ui/hive-web/bower.json
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/bower.json b/contrib/views/hive/src/main/resources/ui/hive-web/bower.json
index 72cdf2a..6707d16 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/bower.json
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/bower.json
@@ -19,8 +19,8 @@
     "selectize": "~0.12.0",
     "pretender": "0.1.0",
     "ember-uploader": "0.3.9",
-    "polestar": "https://github.com/pallavkul/polestar.git",
-    "voyager": "https://github.com/pallavkul/voyager.git"
+    "polestar": "https://github.com/pallavkul/polestar.git#0.7.1",
+    "voyager": "https://github.com/pallavkul/voyager.git#0.7.1"
   },
   "resolutions": {
     "ember": "1.10.0"


[45/50] ambari git commit: AMBARI-14129 No error appeared after pid dir and log dir property set to value with space at start. (ababiichuk)

Posted by nc...@apache.org.
AMBARI-14129 No error appeared after pid dir and log dir property set to value with space at start. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f74ec871
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f74ec871
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f74ec871

Branch: refs/heads/branch-dev-patch-upgrade
Commit: f74ec8712beb8414cbbe830aee16266bf4bd197e
Parents: 1d01a85
Author: aBabiichuk <ab...@cybervisiontech.com>
Authored: Tue Dec 1 13:57:44 2015 +0200
Committer: aBabiichuk <ab...@cybervisiontech.com>
Committed: Tue Dec 1 14:01:58 2015 +0200

----------------------------------------------------------------------
 .../AMBARI_METRICS/0.1.0/configuration/ams-env.xml      | 12 ++++++++++++
 .../0.1.0/configuration/ams-hbase-env.xml               |  6 ++++++
 .../0.1.0/configuration/ams-hbase-site.xml              |  6 ++++++
 .../AMBARI_METRICS/0.1.0/configuration/ams-site.xml     |  3 +++
 .../HBASE/0.96.0.2.0/configuration/hbase-env.xml        |  3 +++
 .../SPARK/1.2.0.2.2/configuration/spark-env.xml         |  6 ++++++
 .../0.8/services/HBASE/configuration/hbase-env.xml      |  6 ++++++
 .../0.8/services/HBASE/configuration/hbase-site.xml     |  3 +++
 .../services/HBASE/configuration/hbase-site.xml         |  3 +++
 .../services/HBASE/configuration/hbase-site.xml         |  3 +++
 .../HDP/2.2/services/HBASE/configuration/hbase-env.xml  |  2 ++
 .../services/HBASE/configuration/hbase-site.xml         |  3 +++
 .../services/AMBARI_METRICS/configuration/ams-env.xml   | 12 ++++++++++++
 13 files changed, 68 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f74ec871/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-env.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-env.xml
index 41d8912..8384767 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-env.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-env.xml
@@ -34,24 +34,36 @@
     <value>/var/log/ambari-metrics-collector</value>
     <display-name>Metrics Collector log dir</display-name>
     <description>Collector log directory.</description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
   <property>
     <name>metrics_collector_pid_dir</name>
     <value>/var/run/ambari-metrics-collector</value>
     <display-name>Metrics Collector pid dir</display-name>
     <description>Collector pid directory.</description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
   <property>
     <name>metrics_monitor_pid_dir</name>
     <value>/var/run/ambari-metrics-monitor</value>
     <display-name>Metrics Monitor pid dir</display-name>
     <description>Monitor pid directory.</description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
   <property>
     <name>metrics_monitor_log_dir</name>
     <value>/var/log/ambari-metrics-monitor</value>
     <display-name>Metrics Monitor log dir</display-name>
     <description>Monitor log directory.</description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
   <property>
     <name>metrics_collector_heapsize</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f74ec871/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
index d475932..a061006 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
@@ -25,11 +25,17 @@
     <name>hbase_log_dir</name>
     <value>/var/log/ambari-metrics-collector</value>
     <description>Log Directories for HBase.</description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
   <property>
     <name>hbase_pid_dir</name>
     <value>/var/run/ambari-metrics-collector/</value>
     <description>Pid Directory for HBase.</description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
   <property>
     <name>hbase_classpath_additional</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f74ec871/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-site.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-site.xml
index 3f4a9d4..33ac1b7 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-site.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-site.xml
@@ -39,6 +39,9 @@
       than '/tmp' (The '/tmp' directory is often cleared on
       machine restart).
     </description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
   <property>
     <name>hbase.local.dir</name>
@@ -313,6 +316,9 @@
       Set directory for Phoenix spill files. If possible set this to a
       different mount point from the one for hbase.rootdir in embedded mode.
     </description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
   <property>
     <name>phoenix.mutate.batchSize</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f74ec871/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
index 04b7806..4237f21 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
@@ -406,6 +406,9 @@
       Set directory for Phoenix spill files. If possible set this to a
       different mount point from the one for hbase.rootdir in embedded mode.
     </description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
   <property>
     <name>timeline.metrics.service.cluster.aggregator.appIds</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f74ec871/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-env.xml b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-env.xml
index 773a8c9..cabd4fd 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-env.xml
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-env.xml
@@ -117,6 +117,9 @@ and the -Xmn ratio (hbase_regionserver_xmn_ratio) exceeds this value.
     <name>hbase_java_io_tmpdir</name>
     <value>/tmp</value>
     <description>Used in hbase-env.sh as HBASE_OPTS=-Djava.io.tmpdir=java_io_tmpdir</description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
 
   <property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f74ec871/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/configuration/spark-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/configuration/spark-env.xml b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/configuration/spark-env.xml
index 7374972..24b373b 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/configuration/spark-env.xml
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/configuration/spark-env.xml
@@ -46,11 +46,17 @@
     <name>spark_log_dir</name>
     <value>/var/log/spark</value>
     <description>Spark Log Dir</description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
 
   <property>
     <name>spark_pid_dir</name>
     <value>/var/run/spark</value>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
 
   <!-- spark-env.sh -->

http://git-wip-us.apache.org/repos/asf/ambari/blob/f74ec871/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HBASE/configuration/hbase-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HBASE/configuration/hbase-env.xml b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HBASE/configuration/hbase-env.xml
index 732d0e4..81f392f 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HBASE/configuration/hbase-env.xml
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HBASE/configuration/hbase-env.xml
@@ -25,11 +25,17 @@
     <name>hbase_log_dir</name>
     <value>/var/log/hbase</value>
     <description>Log Directories for HBase.</description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
   <property>
     <name>hbase_pid_dir</name>
     <value>/var/run/hbase</value>
     <description>Pid Directory for HBase.</description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
   <property>
     <name>hbase_regionserver_heapsize</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f74ec871/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HBASE/configuration/hbase-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HBASE/configuration/hbase-site.xml b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HBASE/configuration/hbase-site.xml
index 84900d1..62b1a2c 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HBASE/configuration/hbase-site.xml
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HBASE/configuration/hbase-site.xml
@@ -55,6 +55,9 @@
     than '/tmp' (The '/tmp' directory is often cleared on
     machine restart).
     </description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
   <property>
     <name>hbase.local.dir</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f74ec871/ambari-server/src/main/resources/stacks/HDP/2.0.6.GlusterFS/services/HBASE/configuration/hbase-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6.GlusterFS/services/HBASE/configuration/hbase-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6.GlusterFS/services/HBASE/configuration/hbase-site.xml
index cf8ddd3..c9a5220 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6.GlusterFS/services/HBASE/configuration/hbase-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6.GlusterFS/services/HBASE/configuration/hbase-site.xml
@@ -55,6 +55,9 @@
     than '/tmp' (The '/tmp' directory is often cleared on
     machine restart).
     </description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
   <property>
     <name>hbase.local.dir</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f74ec871/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/HBASE/configuration/hbase-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/HBASE/configuration/hbase-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/HBASE/configuration/hbase-site.xml
index 8bdd8d5..6354d44 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/HBASE/configuration/hbase-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/HBASE/configuration/hbase-site.xml
@@ -59,6 +59,9 @@
     than '/tmp' (The '/tmp' directory is often cleared on
     machine restart).
     </description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
   <property>
     <name>hbase.local.dir</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f74ec871/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/configuration/hbase-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/configuration/hbase-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/configuration/hbase-env.xml
index d4a6aac..7aafa86 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/configuration/hbase-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/configuration/hbase-env.xml
@@ -27,6 +27,7 @@
     <display-name>HBase Log Dir Prefix</display-name>
     <description>Log Directories for HBase.</description>
     <value-attributes>
+      <type>directory</type>
       <overridable>false</overridable>
       <editable-only-at-install>true</editable-only-at-install>
     </value-attributes>
@@ -37,6 +38,7 @@
     <display-name>HBase PID Dir</display-name>
     <description>Pid Directory for HBase.</description>
     <value-attributes>
+      <type>directory</type>
       <overridable>false</overridable>
       <editable-only-at-install>true</editable-only-at-install>
     </value-attributes>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f74ec871/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/HBASE/configuration/hbase-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/HBASE/configuration/hbase-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/HBASE/configuration/hbase-site.xml
index 82b6e09..1895169 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/HBASE/configuration/hbase-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/HBASE/configuration/hbase-site.xml
@@ -56,6 +56,9 @@
     than '/tmp' (The '/tmp' directory is often cleared on
     machine restart).
     </description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
   <property>
     <name>hbase.local.dir</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f74ec871/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/AMBARI_METRICS/configuration/ams-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/AMBARI_METRICS/configuration/ams-env.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/AMBARI_METRICS/configuration/ams-env.xml
index 5dfa769..efb44d1 100644
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/AMBARI_METRICS/configuration/ams-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/AMBARI_METRICS/configuration/ams-env.xml
@@ -28,24 +28,36 @@
     <value>C:\var\log\ambari-metrics-collector</value>
     <display-name>Metrics Collector log dir</display-name>
     <description>Collector log directory.</description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
   <property>
     <name>metrics_collector_pid_dir</name>
     <value>C:\var\run\ambari-metrics-collector</value>
     <display-name>Metrics Collector pid dir</display-name>
     <description>Collector pid directory.</description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
   <property>
     <name>metrics_monitor_pid_dir</name>
     <value>C:\var\run\ambari-metrics-monitor</value>
     <display-name>Metrics Monitor pid dir</display-name>
     <description>Monitor pid directory.</description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
   <property>
     <name>metrics_monitor_log_dir</name>
     <value>C:\var\log\ambari-metrics-monitor</value>
     <display-name>Metrics Monitor log dir</display-name>
     <description>Monitor log directory.</description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
   </property>
 
   <property>


[16/50] ambari git commit: AMBARI-14060. Hive View Settings don't persist properly. (Pallav Kulshreshtha via Jaimin)

Posted by nc...@apache.org.
AMBARI-14060. Hive View Settings don't persist properly. (Pallav Kulshreshtha via Jaimin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b3cdc4e3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b3cdc4e3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b3cdc4e3

Branch: refs/heads/branch-dev-patch-upgrade
Commit: b3cdc4e33cc03dba381f269e3b5cd80888bc82b1
Parents: d8a4225
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Sun Nov 29 20:18:24 2015 -0800
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Sun Nov 29 20:20:06 2015 -0800

----------------------------------------------------------------------
 .../hive-web/app/components/typeahead-widget.js | 15 ++++++++++++++
 .../ui/hive-web/app/templates/settings.hbs      | 21 +++++++++++---------
 .../ui/hive-web/app/utils/constants.js          |  5 +++++
 3 files changed, 32 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b3cdc4e3/contrib/views/hive/src/main/resources/ui/hive-web/app/components/typeahead-widget.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/components/typeahead-widget.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/components/typeahead-widget.js
index f9233a0..5bc0bda 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/components/typeahead-widget.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/components/typeahead-widget.js
@@ -31,6 +31,21 @@ export default Typeahead.extend(Ember.I18n.TranslateableProperties, {
 
     if($('.selectize-input')) {$('.selectize-input').addClass( "mozBoxSizeFix" );}
 
+    var currentKeyName = this.get('safeValue');
+    var currentTypehead = $('*[keyname="' + currentKeyName +'"]');
+
+    if (currentTypehead.find($('.selectize-input')).has('.item').length == 0) {
+      currentTypehead.find($('.selectize-input')).addClass("has-options has-items ");
+
+      currentTypehead.find($('.selectized option:selected')).val(currentKeyName);
+      currentTypehead.find($('.selectized option:selected')).text(currentKeyName);
+
+      currentTypehead.find($('.selectize-input input')).css({'opacity': 0 , 'position': 'absolute' , 'left': '-10000px'});
+
+      var itemHtml = '<div data-value=' + currentKeyName + ' class=item >' + currentKeyName + '</div>';
+      currentTypehead.find($('.selectize-input')).append( itemHtml );
+
+    }
   },
 
   removeExcludedObserver: function () {

http://git-wip-us.apache.org/repos/asf/ambari/blob/b3cdc4e3/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/settings.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/settings.hbs b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/settings.hbs
index 55b659a..c320a1c 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/settings.hbs
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/settings.hbs
@@ -33,17 +33,20 @@
         <div class="form-group">
           <div class="input-group">
             <div class="input-group-addon">
-              {{typeahead-widget
-                  options=predefinedSettings
-                  excluded=excluded
-                  optionLabelPath="name"
-                  optionValuePath="name"
-                  selection=setting.key
-                  create="addKey"
-              }}
+
+              <div {{bind-attr keyname="setting.key.name"}} class="typeahead-container">
+                {{typeahead-widget
+                    options=predefinedSettings
+                    excluded=excluded
+                    optionLabelPath="name"
+                    optionValuePath="name"
+                    selection=setting.key
+                    safeValue = setting.key.name
+                    create="addKey"
+                }}
+              </div>
             </div>
             <div {{bind-attr class=":input-group-addon setting.valid::has-error"}}>
-
               <div class="setting-input-value">
                 {{#if setting.key.values}}
                   {{select-widget items=setting.key.values

http://git-wip-us.apache.org/repos/asf/ambari/blob/b3cdc4e3/contrib/views/hive/src/main/resources/ui/hive-web/app/utils/constants.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/utils/constants.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/utils/constants.js
index 199677d..4b9899a 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/utils/constants.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/utils/constants.js
@@ -90,6 +90,7 @@ export default Ember.Object.create({
       name: 'hive.tez.container.size',
       validate: helpers.regexes.digits
     },
+
     {
       name: 'hive.prewarm.enabled',
       values: helpers.validationValues.bool
@@ -111,6 +112,10 @@ export default Ember.Object.create({
       values: helpers.validationValues.bool
     },
     {
+      name: 'hive.auto.convert.join',
+      values: helpers.validationValues.bool
+    },
+    {
       name: 'tez.am.resource.memory.mb',
       validate: helpers.regexes.digits
     },


[49/50] ambari git commit: AMBARI-14135 Unable to delete config group in Install Wizard (akovalenko)

Posted by nc...@apache.org.
AMBARI-14135 Unable to delete config group in Install Wizard (akovalenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/108fc449
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/108fc449
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/108fc449

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 108fc4499a545ee6f60eca26eefec7f5192c6b07
Parents: c5d7cf6
Author: Aleksandr Kovalenko <ak...@hortonworks.com>
Authored: Tue Dec 1 17:58:45 2015 +0200
Committer: Aleksandr Kovalenko <ak...@hortonworks.com>
Committed: Tue Dec 1 18:07:33 2015 +0200

----------------------------------------------------------------------
 ambari-web/app/mixins/main/service/configs/config_overridable.js | 3 +++
 1 file changed, 3 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/108fc449/ambari-web/app/mixins/main/service/configs/config_overridable.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/main/service/configs/config_overridable.js b/ambari-web/app/mixins/main/service/configs/config_overridable.js
index 824b704..25d3c62 100644
--- a/ambari-web/app/mixins/main/service/configs/config_overridable.js
+++ b/ambari-web/app/mixins/main/service/configs/config_overridable.js
@@ -133,8 +133,11 @@ App.ConfigOverridable = Em.Mixin.create({
           var newConfigGroup = {
             id: serviceName + "_NEW_" + configGroups.length,
             name: newConfigGroupName,
+            is_default: false,
+            parent_config_group_id: App.ServiceConfigGroup.getParentConfigGroupId(serviceId),
             description: Em.I18n.t('config.group.description.default').format(new Date().toDateString()),
             service_id: serviceId,
+            service_name: serviceId,
             hosts: [],
             desired_configs: []
           };


[39/50] ambari git commit: AMBARI-14119. Ambari fails to start with postgres DB on Suse. (vbrodetskyi)

Posted by nc...@apache.org.
AMBARI-14119. Ambari fails to start with postgres DB on Suse. (vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/58f62665
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/58f62665
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/58f62665

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 58f62665379586e044a4801aec253dbf30b57a3e
Parents: 730e6f4
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Tue Dec 1 12:41:45 2015 +0200
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Tue Dec 1 12:41:45 2015 +0200

----------------------------------------------------------------------
 .../python/ambari_server/dbConfiguration.py     |  8 +++-
 .../ambari_server/dbConfiguration_linux.py      | 22 +++++++++++
 .../src/test/python/TestAmbariServer.py         | 40 +++++++++++++++++---
 3 files changed, 64 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/58f62665/ambari-server/src/main/python/ambari_server/dbConfiguration.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/dbConfiguration.py b/ambari-server/src/main/python/ambari_server/dbConfiguration.py
index 4ef0172..5519a3d 100644
--- a/ambari-server/src/main/python/ambari_server/dbConfiguration.py
+++ b/ambari-server/src/main/python/ambari_server/dbConfiguration.py
@@ -20,7 +20,7 @@ limitations under the License.
 import glob
 import os
 
-from ambari_commons import OSConst
+from ambari_commons import OSConst, OSCheck
 from ambari_commons.exceptions import FatalException
 from ambari_commons.logging_utils import get_silent, print_error_msg, print_info_msg, print_warning_msg, set_silent
 from ambari_commons.os_family_impl import OsFamilyImpl
@@ -124,6 +124,9 @@ class DBMSConfig(object):
       #DB setup should be done last after doing any setup.
       if self._is_local_database():
         self._setup_local_server(properties)
+        # this issue appears only for Suse. Postgres need /var/run/postgresql dir but do not create it
+        if OSCheck.is_suse_family():
+          self._create_postgres_lock_directory()
       else:
         self._setup_remote_server(properties)
     return result
@@ -212,6 +215,9 @@ class DBMSConfig(object):
     #linux_prompt_db_properties(args)
     return False
 
+  def _create_postgres_lock_directory(self):
+    pass
+
   def _setup_local_server(self, properties):
     pass
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/58f62665/ambari-server/src/main/python/ambari_server/dbConfiguration_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/dbConfiguration_linux.py b/ambari-server/src/main/python/ambari_server/dbConfiguration_linux.py
index b2e9508..73cc1b2 100644
--- a/ambari-server/src/main/python/ambari_server/dbConfiguration_linux.py
+++ b/ambari-server/src/main/python/ambari_server/dbConfiguration_linux.py
@@ -26,6 +26,7 @@ import socket
 import subprocess
 import sys
 import time
+import pwd
 
 from ambari_commons import OSCheck, OSConst
 from ambari_commons.logging_utils import get_silent, get_verbose, print_error_msg, print_info_msg, print_warning_msg
@@ -414,6 +415,27 @@ class PGConfig(LinuxDBMSConfig):
     print 'Default properties detected. Using built-in database.'
     self._store_local_properties(properties)
 
+  def _create_postgres_lock_directory(self):
+    postgres_user_uid = None
+    try:
+      postgres_user_uid = pwd.getpwnam("postgres").pw_uid
+    except KeyError:
+      print "WARNING: Unable to create /var/run/postgresql directory, because user [postgres] doesn't exist. Potentially," \
+            " postgresql service start can be failed."
+      return
+
+    try:
+      if not os.path.isdir("/var/run/postgresql"):
+        os.mkdir("/var/run/postgresql")
+    except Exception as e:
+      print "WARNING: Unable to create /var/run/postgresql directory. Potentially," \
+            " postgresql service start can be failed."
+      print "Unexpected error: " + str(e)
+      return
+
+    if postgres_user_uid:
+      os.chown("/var/run/postgresql", postgres_user_uid, -1)
+
   def _setup_local_database(self):
     print 'Checking PostgreSQL...'
     (pg_status, retcode, out, err) = PGConfig._check_postgre_up()

http://git-wip-us.apache.org/repos/asf/ambari/blob/58f62665/ambari-server/src/test/python/TestAmbariServer.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestAmbariServer.py b/ambari-server/src/test/python/TestAmbariServer.py
index 1ac0648..0042714 100644
--- a/ambari-server/src/test/python/TestAmbariServer.py
+++ b/ambari-server/src/test/python/TestAmbariServer.py
@@ -3068,16 +3068,27 @@ class TestAmbariServer(TestCase):
     pass
 
   @not_for_platform(PLATFORM_WINDOWS)
-  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
+  @patch("os.path.isdir")
+  @patch("os.mkdir")
+  @patch("os.chown")
+  @patch("pwd.getpwnam")
+  @patch.object(OSCheck, "get_os_family")
   @patch.object(LinuxDBMSConfig, "_setup_remote_server")
   @patch("ambari_server.dbConfiguration_linux.print_info_msg")
   @patch("ambari_server.dbConfiguration_linux.read_password")
   @patch("ambari_server.dbConfiguration_linux.get_validated_string_input")
   @patch("ambari_server.dbConfiguration.get_validated_string_input")
   @patch("ambari_server.serverSetup.get_YN_input")
-  def test_prompt_db_properties_oracle_sid(self, gyni_mock, gvsi_mock, gvsi_2_mock, rp_mock, print_info_msg_mock, srs_mock):
+  def test_prompt_db_properties_postgre_adv(self, gyni_mock, gvsi_mock, gvsi_2_mock, rp_mock, print_info_msg_mock, sls_mock,
+                                            get_os_family_mock, get_pw_nam_mock, chown_mock, mkdir_mock, isdir_mock):
     gyni_mock.return_value = True
     list_of_return_values = ["ambari-server", "ambari", "2", "1521", "localhost", "2"]
+    get_os_family_mock.return_value = OSConst.SUSE_FAMILY
+    pw = MagicMock()
+    pw.setattr('pw_uid', 0)
+    pw.setattr('pw_gid', 0)
+    get_pw_nam_mock.return_value = pw
+
 
     def side_effect(*args, **kwargs):
       return list_of_return_values.pop()
@@ -3118,24 +3129,38 @@ class TestAmbariServer(TestCase):
     self.assertEqual(dbmsConfig.database_username, "ambari")
     self.assertEqual(dbmsConfig.database_password, "bigdata")
 
+    isdir_mock.return_value = False
+
     dbmsConfig.configure_database(props)
 
     self.assertEqual(dbmsConfig.database_username, "ambari-server")
     self.assertEqual(dbmsConfig.database_password, "password")
     self.assertEqual(dbmsConfig.sid_or_sname, "sid")
+    self.assertTrue(chown_mock.called)
+    self.assertTrue(mkdir_mock.called)
     pass
 
   @not_for_platform(PLATFORM_WINDOWS)
-  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
+  @patch("os.path.isdir")
+  @patch("os.mkdir")
+  @patch("os.chown")
+  @patch("pwd.getpwnam")
+  @patch.object(OSCheck, "get_os_family")
   @patch.object(PGConfig, "_setup_local_server")
   @patch("ambari_server.dbConfiguration_linux.print_info_msg")
   @patch("ambari_server.dbConfiguration_linux.read_password")
   @patch("ambari_server.dbConfiguration_linux.get_validated_string_input")
   @patch("ambari_server.dbConfiguration.get_validated_string_input")
   @patch("ambari_server.serverSetup.get_YN_input")
-  def test_prompt_db_properties_postgre_adv(self, gyni_mock, gvsi_mock, gvsi_2_mock, rp_mock, print_info_msg_mock, sls_mock):
+  def test_prompt_db_properties_postgre_adv(self, gyni_mock, gvsi_mock, gvsi_2_mock, rp_mock, print_info_msg_mock, sls_mock,
+                                            get_os_family_mock, get_pw_nam_mock, chown_mock, mkdir_mock, isdir_mock):
     gyni_mock.return_value = True
     list_of_return_values = ["ambari-server", "ambari", "ambari", "1"]
+    get_os_family_mock.return_value = OSConst.SUSE_FAMILY
+    pw = MagicMock()
+    pw.setattr('pw_uid', 0)
+    pw.setattr('pw_gid', 0)
+    get_pw_nam_mock.return_value = pw
 
     def side_effect(*args, **kwargs):
       return list_of_return_values.pop()
@@ -3458,6 +3483,7 @@ class TestAmbariServer(TestCase):
 
   @not_for_platform(PLATFORM_WINDOWS)
   @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
+  @patch("pwd.getpwnam")
   @patch("ambari_commons.firewall.run_os_command")
   @patch("os.path.exists")
   @patch("os.path.isfile")
@@ -3499,7 +3525,7 @@ class TestAmbariServer(TestCase):
                  store_password_file_mock, get_ambari_properties_1_mock, update_properties_mock,
                  get_YN_input_1_mock, ensure_jdbc_driver_installed_mock,
                  remove_file_mock, isfile_mock, exists_mock,
-                 run_os_command_mock):
+                 run_os_command_mock, get_pw_nam_mock):
     hostname = "localhost"
     db_name = "db_ambari"
     postgres_schema = "sc_ambari"
@@ -3523,6 +3549,10 @@ class TestAmbariServer(TestCase):
     failed = False
     properties = Properties()
 
+    def side_effect(username):
+      raise KeyError("")
+
+    get_pw_nam_mock.side_effect = side_effect
     get_YN_input_mock.return_value = False
     isfile_mock.return_value = False
     verify_setup_allowed_method.return_value = 0


[41/50] ambari git commit: AMBARI-13973 Update metrics collector pom.xml to use HDP2.3.4 Hadoop and HBase bits (dsen)

Posted by nc...@apache.org.
AMBARI-13973 Update metrics collector pom.xml to use HDP2.3.4 Hadoop and HBase bits (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2d8f69e7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2d8f69e7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2d8f69e7

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 2d8f69e75b5ab3b3be8d2dd42a76c046484de7d0
Parents: 6c94580
Author: Dmytro Sen <ds...@apache.org>
Authored: Tue Dec 1 13:09:37 2015 +0200
Committer: Dmytro Sen <ds...@apache.org>
Committed: Tue Dec 1 13:09:37 2015 +0200

----------------------------------------------------------------------
 ambari-metrics/ambari-metrics-timelineservice/pom.xml | 6 +++---
 ambari-metrics/pom.xml                                | 8 ++++----
 2 files changed, 7 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2d8f69e7/ambari-metrics/ambari-metrics-timelineservice/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/pom.xml b/ambari-metrics/ambari-metrics-timelineservice/pom.xml
index 05af6cf..40232bf 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/pom.xml
+++ b/ambari-metrics/ambari-metrics-timelineservice/pom.xml
@@ -34,9 +34,9 @@
     <!-- Needed for generating FindBugs warnings using parent pom -->
     <!--<yarn.basedir>${project.parent.parent.basedir}</yarn.basedir>-->
     <protobuf.version>2.5.0</protobuf.version>
-    <hadoop.version>2.7.1.2.3.0.0-2557</hadoop.version>
-    <phoenix.version>4.4.0.2.3.0.0-2557</phoenix.version>
-    <hbase.version>1.1.1.2.3.0.0-2557</hbase.version>
+    <hadoop.version>2.7.1.2.3.4.0-3347</hadoop.version>
+    <phoenix.version>4.4.0.2.3.4.0-3347</phoenix.version>
+    <hbase.version>1.1.2.2.3.4.0-3347</hbase.version>
   </properties>
 
   <build>

http://git-wip-us.apache.org/repos/asf/ambari/blob/2d8f69e7/ambari-metrics/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/pom.xml b/ambari-metrics/pom.xml
index da4712a..302f497 100644
--- a/ambari-metrics/pom.xml
+++ b/ambari-metrics/pom.xml
@@ -36,10 +36,10 @@
     <python.ver>python &gt;= 2.6</python.ver>
     <deb.python.ver>python (&gt;= 2.6)</deb.python.ver>
     <!--TODO change to HDP URL-->
-    <hbase.tar>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.3.0.0/tars/hbase-1.1.1.2.3.0.0-2557.tar.gz</hbase.tar>
-    <hbase.folder>hbase-1.1.1.2.3.0.0-2557</hbase.folder>
-    <hadoop.tar>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.3.0.0/tars/hadoop-2.7.1.2.3.0.0-2557.tar.gz</hadoop.tar>
-    <hadoop.folder>hadoop-2.7.1.2.3.0.0-2557</hadoop.folder>
+    <hbase.tar>http://private-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.3.4.0-3347/tars/hbase-1.1.2.2.3.4.0-3347.tar.gz</hbase.tar>
+    <hbase.folder>hbase-1.1.2.2.3.4.0-3347</hbase.folder>
+    <hadoop.tar>http://private-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.3.4.0-3347/tars/hadoop-2.7.1.2.3.4.0-3347.tar.gz</hadoop.tar>
+    <hadoop.folder>hadoop-2.7.1.2.3.4.0-3347</hadoop.folder>
     <hbase.winpkg.zip>https://msibuilds.blob.core.windows.net/hdp/2.x/2.2.4.2/2/hbase-0.98.4.2.2.4.2-0002-hadoop2.winpkg.zip</hbase.winpkg.zip>
     <hbase.winpkg.folder>hbase-0.98.4.2.2.4.2-0002-hadoop2</hbase.winpkg.folder>
     <hadoop.winpkg.zip>https://msibuilds.blob.core.windows.net/hdp/2.x/2.2.4.2/2/hadoop-2.6.0.2.2.4.2-0002.winpkg.zip</hadoop.winpkg.zip>


[38/50] ambari git commit: AMBARI-14120 : AMS Collector and HBase heap memory config properties are incompatible between 2.1.2 and 2.1.3 when deployed through the same blueprint. (Aravindan Vijayan via swagle)

Posted by nc...@apache.org.
AMBARI-14120 : AMS Collector and HBase heap memory config properties are incompatible between 2.1.2 and 2.1.3 when deployed through the same blueprint. (Aravindan Vijayan via swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/878c61d5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/878c61d5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/878c61d5

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 878c61d5c9a5b50a21f76590eecba95d5e1b5bf0
Parents: 278d4c9
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Mon Nov 30 22:30:41 2015 -0800
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Mon Nov 30 22:30:41 2015 -0800

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog213.java       |  97 ------------------
 .../0.1.0/configuration/ams-env.xml             |   2 +-
 .../0.1.0/configuration/ams-hbase-env.xml       |   8 +-
 .../0.1.0/package/scripts/functions.py          |  16 ++-
 .../0.1.0/package/scripts/params.py             |  24 +++--
 .../server/upgrade/UpgradeCatalog213Test.java   | 100 -------------------
 6 files changed, 33 insertions(+), 214 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/878c61d5/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
index 3b61fa1..b565324 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
@@ -1012,36 +1012,6 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
 
       if (clusterMap != null && !clusterMap.isEmpty()) {
         for (final Cluster cluster : clusterMap.values()) {
-          Config amsEnv = cluster.getDesiredConfigByType(AMS_ENV);
-          if (amsEnv != null) {
-            Map<String, String> amsEnvProperties = amsEnv.getProperties();
-
-            String metrics_collector_heapsize = amsEnvProperties.get("metrics_collector_heapsize");
-            String content = amsEnvProperties.get("content");
-            Map<String, String> newProperties = new HashMap<>();
-            newProperties.put("metrics_collector_heapsize", memoryToIntMb(metrics_collector_heapsize));
-            newProperties.put("content", updateAmsEnvContent(content));
-            updateConfigurationPropertiesForCluster(cluster, AMS_ENV, newProperties, true, true);
-          }
-          Config amsHbaseEnv = cluster.getDesiredConfigByType(AMS_HBASE_ENV);
-          if (amsHbaseEnv != null) {
-            Map<String, String> amsHbaseEnvProperties = amsHbaseEnv.getProperties();
-            String hbase_regionserver_heapsize = amsHbaseEnvProperties.get("hbase_regionserver_heapsize");
-            String regionserver_xmn_size = amsHbaseEnvProperties.get("regionserver_xmn_size");
-            String hbase_master_xmn_size = amsHbaseEnvProperties.get("hbase_master_xmn_size");
-            String hbase_master_maxperm_size = amsHbaseEnvProperties.get("hbase_master_maxperm_size");
-            String hbase_master_heapsize = amsHbaseEnvProperties.get("hbase_master_heapsize");
-            String content = amsHbaseEnvProperties.get("content");
-
-            Map<String, String> newProperties = new HashMap<>();
-            newProperties.put("hbase_regionserver_heapsize", memoryToIntMb(hbase_regionserver_heapsize));
-            newProperties.put("regionserver_xmn_size", memoryToIntMb(regionserver_xmn_size));
-            newProperties.put("hbase_master_xmn_size", memoryToIntMb(hbase_master_xmn_size));
-            newProperties.put("hbase_master_maxperm_size", memoryToIntMb(hbase_master_maxperm_size));
-            newProperties.put("hbase_master_heapsize", memoryToIntMb(hbase_master_heapsize));
-            newProperties.put("content", updateAmsHbaseEnvContent(content));
-            updateConfigurationPropertiesForCluster(cluster, AMS_HBASE_ENV, newProperties, true, true);
-          }
           Config amsSite = cluster.getDesiredConfigByType(AMS_SITE);
           if (amsSite != null) {
             Map<String, String> newProperties = new HashMap<>();
@@ -1158,44 +1128,6 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
     }
   }
 
-  protected String updateAmsEnvContent(String oldContent) {
-    if (oldContent == null) {
-      return null;
-    }
-    String regSearch = "export\\s*AMS_COLLECTOR_HEAPSIZE\\s*=\\s*\\{\\{metrics_collector_heapsize\\}\\}";
-    String replacement = "export AMS_COLLECTOR_HEAPSIZE={{metrics_collector_heapsize}}m";
-    return oldContent.replaceAll(regSearch, replacement);
-  }
-
-  protected String updateAmsHbaseEnvContent(String content) {
-    if (content == null) {
-      return null;
-    }
-
-    String regSearch = "\\{\\{hbase_heapsize\\}\\}";
-    String replacement = "{{hbase_heapsize}}m";
-    content = content.replaceAll(regSearch, replacement);
-    regSearch = "\\{\\{hbase_master_maxperm_size\\}\\}";
-    replacement = "{{hbase_master_maxperm_size}}m";
-    content = content.replaceAll(regSearch, replacement);
-    regSearch = "\\{\\{hbase_master_xmn_size\\}\\}";
-    replacement = "{{hbase_master_xmn_size}}m";
-    content = content.replaceAll(regSearch, replacement);
-    regSearch = "\\{\\{regionserver_xmn_size\\}\\}";
-    replacement = "{{regionserver_xmn_size}}m";
-    content = content.replaceAll(regSearch, replacement);
-    regSearch = "\\{\\{regionserver_heapsize\\}\\}";
-    replacement = "{{regionserver_heapsize}}m";
-    content = content.replaceAll(regSearch, replacement);
-    regSearch = "export HBASE_HEAPSIZE=";
-    replacement = "#export HBASE_HEAPSIZE=";
-    content = content.replaceAll(regSearch, replacement);
-    content += "\n" +
-      "# The maximum amount of heap to use for hbase shell.\n" +
-      "export HBASE_SHELL_OPTS=\"-Xmx256m\"\n";
-    return content;
-  }
-
   protected String updateHiveEnvContent(String hiveEnvContent) {
     if(hiveEnvContent == null) {
       return null;
@@ -1255,33 +1187,4 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
       } // else -- no special client-configuration is necessary.
     }
   }
-
-  private String memoryToIntMb(String memorySize) {
-    if (memorySize == null) {
-      return "0";
-    }
-    Integer value = 0;
-    try {
-      value = Integer.parseInt(memorySize.replaceAll("\\D+", ""));
-    } catch (NumberFormatException ex) {
-      LOG.error(ex.getMessage());
-    }
-    char unit = memorySize.toUpperCase().charAt(memorySize.length() - 1);
-    // Recalculate memory size to Mb
-    switch (unit) {
-      case 'K':
-        value /= 1024;
-        break;
-      case 'B':
-        value /= (1024*1024);
-        break;
-      case 'G':
-        value *= 1024;
-        break;
-      case 'T':
-        value *= 1024*1024;
-        break;
-    }
-    return value.toString();
-  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/878c61d5/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-env.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-env.xml
index 3335c90..41d8912 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-env.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-env.xml
@@ -86,7 +86,7 @@ export AMS_MONITOR_PID_DIR={{ams_monitor_pid_dir}}
 export AMS_HBASE_PID_DIR={{hbase_pid_dir}}
 
 # AMS Collector heapsize
-export AMS_COLLECTOR_HEAPSIZE={{metrics_collector_heapsize}}m
+export AMS_COLLECTOR_HEAPSIZE={{metrics_collector_heapsize}}
 
 # AMS Collector options
 export AMS_COLLECTOR_OPTS="-Djava.library.path=/usr/lib/ams-hbase/lib/hadoop-native"

http://git-wip-us.apache.org/repos/asf/ambari/blob/878c61d5/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
index 33bd713..d475932 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
@@ -178,11 +178,11 @@ export SERVER_GC_OPTS="-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -X
 # export HBASE_JMX_BASE="-Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false"
 
 {% if java_version &lt; 8 %}
-export HBASE_MASTER_OPTS=" -XX:PermSize=64m -XX:MaxPermSize={{hbase_master_maxperm_size}}m -Xms{{hbase_heapsize}}m -Xmx{{hbase_heapsize}}m -Xmn{{hbase_master_xmn_size}}m -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly"
-export HBASE_REGIONSERVER_OPTS="-XX:MaxPermSize=128m -Xmn{{regionserver_xmn_size}}m -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly -Xms{{regionserver_heapsize}}m -Xmx{{regionserver_heapsize}}m"
+export HBASE_MASTER_OPTS=" -XX:PermSize=64m -XX:MaxPermSize={{hbase_master_maxperm_size}} -Xms{{hbase_heapsize}} -Xmx{{hbase_heapsize}} -Xmn{{hbase_master_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly"
+export HBASE_REGIONSERVER_OPTS="-XX:MaxPermSize=128m -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}}"
 {% else %}
-export HBASE_MASTER_OPTS=" -Xms{{hbase_heapsize}}m -Xmx{{hbase_heapsize}}m -Xmn{{hbase_master_xmn_size}}m -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly"
-export HBASE_REGIONSERVER_OPTS=" -Xmn{{regionserver_xmn_size}}m -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly -Xms{{regionserver_heapsize}}m -Xmx{{regionserver_heapsize}}m"
+export HBASE_MASTER_OPTS=" -Xms{{hbase_heapsize}} -Xmx{{hbase_heapsize}} -Xmn{{hbase_master_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly"
+export HBASE_REGIONSERVER_OPTS=" -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}}"
 {% endif %}
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/878c61d5/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/functions.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/functions.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/functions.py
index 01315e5..140c24c 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/functions.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/functions.py
@@ -25,21 +25,27 @@ import datetime
 
 from resource_management.core.shell import checked_call
 
-def calc_xmn_from_xms(heapsize, xmn_percent, xmn_max):
+def calc_xmn_from_xms(heapsize_str, xmn_percent, xmn_max):
   """
-  @param heapsize: int (e.g 1000)
+  @param heapsize: str (e.g 1000m)
   @param xmn_percent: float (e.g 0.2)
   @param xmn_max: integer (e.g 512)
   """
+  heapsize = int(re.search('\d+', str(heapsize_str)).group(0))
+  heapsize_unit = re.search('\D+', str(heapsize_str)).group(0)
+
   xmn_val = int(math.floor(heapsize*xmn_percent))
   xmn_val -= xmn_val % 8
   
   result_xmn_val = xmn_max if xmn_val > xmn_max else xmn_val
-  return result_xmn_val
+  return str(result_xmn_val) + heapsize_unit
 
 def trim_heap_property(property, m_suffix = "m"):
-
   if property and property.endswith(m_suffix):
     property = property[:-1]
-
   return property
+
+def check_append_heap_property(property, m_suffix = "m"):
+  if property and not property.endswith(m_suffix):
+    property += m_suffix
+  return property
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/878c61d5/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
index 68a3daa..8b99f96 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
@@ -19,6 +19,7 @@ limitations under the License.
 """
 
 from functions import calc_xmn_from_xms
+from functions import check_append_heap_property
 from functions import trim_heap_property
 
 from resource_management import *
@@ -34,6 +35,15 @@ else:
 config = Script.get_config()
 exec_tmp_dir = Script.get_tmp_dir()
 
+def get_combined_memory_mb(value1, value2):
+  try:
+    part1 = int(value1.strip()[:-1]) if value1.lower().strip()[-1:] == 'm' else int(value1)
+    part2 = int(value2.strip()[:-1]) if value2.lower().strip()[-1:] == 'm' else int(value2)
+    return str(part1 + part2) + 'm'
+  except:
+    return None
+pass
+
 #AMBARI_METRICS data
 ams_pid_dir = status_params.ams_collector_pid_dir
 
@@ -99,9 +109,9 @@ master_heapsize = config['configurations']['ams-hbase-env']['hbase_master_heapsi
 regionserver_heapsize = config['configurations']['ams-hbase-env']['hbase_regionserver_heapsize']
 
 # Check if hbase java options already have appended "m". If Yes, remove the trailing m.
-metrics_collector_heapsize = int(trim_heap_property(str(metrics_collector_heapsize), "m"))
-master_heapsize = int(trim_heap_property(str(master_heapsize), "m"))
-regionserver_heapsize = int(trim_heap_property(str(regionserver_heapsize), "m"))
+metrics_collector_heapsize = check_append_heap_property(str(metrics_collector_heapsize), "m")
+master_heapsize = check_append_heap_property(str(master_heapsize), "m")
+regionserver_heapsize = check_append_heap_property(str(regionserver_heapsize), "m")
 
 regionserver_xmn_max = default('/configurations/ams-hbase-env/hbase_regionserver_xmn_max', None)
 if regionserver_xmn_max:
@@ -116,13 +126,13 @@ hbase_master_xmn_size = config['configurations']['ams-hbase-env']['hbase_master_
 hbase_master_maxperm_size = config['configurations']['ams-hbase-env']['hbase_master_maxperm_size']
 
 # Check if hbase java options already have appended "m". If Yes, remove the trailing m.
-hbase_master_maxperm_size = int(trim_heap_property(str(hbase_master_maxperm_size), "m"))
-hbase_master_xmn_size = int(trim_heap_property(str(hbase_master_xmn_size), "m"))
-regionserver_xmn_size = int(trim_heap_property(str(regionserver_xmn_size), "m"))
+hbase_master_maxperm_size = check_append_heap_property(str(hbase_master_maxperm_size), "m")
+hbase_master_xmn_size = check_append_heap_property(str(hbase_master_xmn_size), "m")
+regionserver_xmn_size = check_append_heap_property(str(regionserver_xmn_size), "m")
 
 # Choose heap size for embedded mode as sum of master + regionserver
 if not is_hbase_distributed:
-  hbase_heapsize = master_heapsize + regionserver_heapsize
+  hbase_heapsize = get_combined_memory_mb(master_heapsize, regionserver_heapsize)
   if hbase_heapsize is None:
     hbase_heapsize = master_heapsize
 else:

http://git-wip-us.apache.org/repos/asf/ambari/blob/878c61d5/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
index 7cd835e..0e7ea4a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
@@ -590,106 +590,6 @@ public class UpgradeCatalog213Test {
   }
 
   @Test
-  public void testUpdateAmsHbaseEnvContent() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
-    Method updateAmsHbaseEnvContent = UpgradeCatalog213.class.getDeclaredMethod("updateAmsHbaseEnvContent", String.class);
-    UpgradeCatalog213 upgradeCatalog213 = new UpgradeCatalog213(injector);
-    String oldContent = "export HBASE_CLASSPATH=${HBASE_CLASSPATH}\n" +
-      "\n" +
-      "# The maximum amount of heap to use, in MB. Default is 1000.\n" +
-      "export HBASE_HEAPSIZE={{hbase_heapsize}}\n" +
-      "\n" +
-      "{% if java_version &lt; 8 %}\n" +
-      "export HBASE_MASTER_OPTS=\" -XX:PermSize=64m -XX:MaxPermSize={{hbase_master_maxperm_size}} -Xms{{hbase_heapsize}} -Xmx{{hbase_heapsize}} -Xmn{{hbase_master_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly\"\n" +
-      "export HBASE_REGIONSERVER_OPTS=\"-XX:MaxPermSize=128m -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}}\"\n" +
-      "{% else %}\n" +
-      "export HBASE_MASTER_OPTS=\" -Xms{{hbase_heapsize}} -Xmx{{hbase_heapsize}} -Xmn{{hbase_master_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly\"\n" +
-      "export HBASE_REGIONSERVER_OPTS=\" -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}}\"\n" +
-      "{% endif %}\n";
-    String expectedContent = "export HBASE_CLASSPATH=${HBASE_CLASSPATH}\n" +
-      "\n" +
-      "# The maximum amount of heap to use, in MB. Default is 1000.\n" +
-      "#export HBASE_HEAPSIZE={{hbase_heapsize}}m\n" +
-      "\n" +
-      "{% if java_version &lt; 8 %}\n" +
-      "export HBASE_MASTER_OPTS=\" -XX:PermSize=64m -XX:MaxPermSize={{hbase_master_maxperm_size}}m -Xms{{hbase_heapsize}}m -Xmx{{hbase_heapsize}}m -Xmn{{hbase_master_xmn_size}}m -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly\"\n" +
-      "export HBASE_REGIONSERVER_OPTS=\"-XX:MaxPermSize=128m -Xmn{{regionserver_xmn_size}}m -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly -Xms{{regionserver_heapsize}}m -Xmx{{regionserver_heapsize}}m\"\n" +
-      "{% else %}\n" +
-      "export HBASE_MASTER_OPTS=\" -Xms{{hbase_heapsize}}m -Xmx{{hbase_heapsize}}m -Xmn{{hbase_master_xmn_size}}m -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly\"\n" +
-      "export HBASE_REGIONSERVER_OPTS=\" -Xmn{{regionserver_xmn_size}}m -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly -Xms{{regionserver_heapsize}}m -Xmx{{regionserver_heapsize}}m\"\n" +
-      "{% endif %}\n\n" +
-      "# The maximum amount of heap to use for hbase shell.\n" +
-      "export HBASE_SHELL_OPTS=\"-Xmx256m\"\n";
-    String result = (String) updateAmsHbaseEnvContent.invoke(upgradeCatalog213, oldContent);
-    Assert.assertEquals(expectedContent, result);
-  }
-
-  @Test
-  public void testUpdateAmsEnvContent() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
-    Method updateAmsEnvContent = UpgradeCatalog213.class.getDeclaredMethod("updateAmsEnvContent", String.class);
-    UpgradeCatalog213 upgradeCatalog213 = new UpgradeCatalog213(injector);
-    String oldContent = "# AMS Collector heapsize\n" +
-      "export AMS_COLLECTOR_HEAPSIZE={{metrics_collector_heapsize}}\n";
-    String expectedContent = "# AMS Collector heapsize\n" +
-      "export AMS_COLLECTOR_HEAPSIZE={{metrics_collector_heapsize}}m\n";
-    String result = (String) updateAmsEnvContent.invoke(upgradeCatalog213, oldContent);
-    Assert.assertEquals(expectedContent, result);
-  }
-
-  @Test
-  public void testUpdateAmsConfigs() throws Exception {
-    EasyMockSupport easyMockSupport = new EasyMockSupport();
-    final AmbariManagementController mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);
-    final ConfigHelper mockConfigHelper = easyMockSupport.createMock(ConfigHelper.class);
-
-    final Clusters mockClusters = easyMockSupport.createStrictMock(Clusters.class);
-    final Cluster mockClusterExpected = easyMockSupport.createNiceMock(Cluster.class);
-    final Map<String, String> propertiesAmsEnv = new HashMap<String, String>() {
-      {
-        put("metrics_collector_heapsize", "512m");
-      }
-    };
-
-    final Map<String, String> propertiesAmsHbaseEnv = new HashMap<String, String>() {
-      {
-        put("hbase_regionserver_heapsize", "512m");
-        put("regionserver_xmn_size", "512m");
-        put("hbase_master_xmn_size", "512m");
-        put("hbase_master_maxperm_size", "512");
-      }
-    };
-
-    final Config mockAmsEnv = easyMockSupport.createNiceMock(Config.class);
-    final Config mockAmsHbaseEnv = easyMockSupport.createNiceMock(Config.class);
-
-    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
-      @Override
-      protected void configure() {
-        bind(AmbariManagementController.class).toInstance(mockAmbariManagementController);
-        bind(ConfigHelper.class).toInstance(mockConfigHelper);
-        bind(Clusters.class).toInstance(mockClusters);
-        bind(EntityManager.class).toInstance(entityManager);
-
-        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
-        bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
-      }
-    });
-
-    expect(mockAmbariManagementController.getClusters()).andReturn(mockClusters).once();
-    expect(mockClusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
-      put("normal", mockClusterExpected);
-    }}).once();
-
-    expect(mockClusterExpected.getDesiredConfigByType("ams-env")).andReturn(mockAmsEnv).atLeastOnce();
-    expect(mockClusterExpected.getDesiredConfigByType("ams-hbase-env")).andReturn(mockAmsHbaseEnv).atLeastOnce();
-    expect(mockAmsEnv.getProperties()).andReturn(propertiesAmsEnv).atLeastOnce();
-    expect(mockAmsHbaseEnv.getProperties()).andReturn(propertiesAmsHbaseEnv).atLeastOnce();
-
-    easyMockSupport.replayAll();
-    mockInjector.getInstance(UpgradeCatalog213.class).updateAMSConfigs();
-    easyMockSupport.verifyAll();
-  }
-
-  @Test
   public void testAmsSiteUpdateConfigs() throws Exception{
 
     Map<String, String> oldPropertiesAmsSite = new HashMap<String, String>() {


[46/50] ambari git commit: AMBARI-14128. Improve Em.computed macros (onechiporenko)

Posted by nc...@apache.org.
AMBARI-14128. Improve Em.computed macros (onechiporenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fd29b081
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fd29b081
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fd29b081

Branch: refs/heads/branch-dev-patch-upgrade
Commit: fd29b081504d22715d46382484b8795daf8b2798
Parents: f74ec87
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Tue Dec 1 13:55:01 2015 +0200
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Tue Dec 1 14:06:31 2015 +0200

----------------------------------------------------------------------
 ambari-web/app/utils/ember_computed.js       | 395 +++++++++++++++++++++-
 ambari-web/test/utils/ember_computed_test.js | 160 +++++++++
 2 files changed, 549 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/fd29b081/ambari-web/app/utils/ember_computed.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/ember_computed.js b/ambari-web/app/utils/ember_computed.js
index 03ac4fc..a75fc50 100644
--- a/ambari-web/app/utils/ember_computed.js
+++ b/ambari-web/app/utils/ember_computed.js
@@ -41,8 +41,7 @@ function getProperties(self, propertyNames) {
     propertyName = shouldBeInverted ? propertyName.substr(1) : propertyName;
     var isApp = propertyName.startsWith('App.');
     var name = isApp ? propertyName.replace('App.', '') : propertyName;
-    var context = isApp ? App : self;
-    var value = get(context, name);
+    var value = isApp ? App.get(name) : get(self, name);
     value = shouldBeInverted ? !value : value;
     ret[propertyName] = value;
   }
@@ -60,8 +59,7 @@ function getProperties(self, propertyNames) {
 function smartGet(self, propertyName) {
   var isApp = propertyName.startsWith('App.');
   var name = isApp ? propertyName.replace('App.', '') : propertyName;
-  var context = isApp ? App : self;
-  return get(context, name)
+  return  isApp ? App.get(name) : get(self, name);
 }
 
 /**
@@ -147,6 +145,15 @@ computed.equal = function (dependentKey, value) {
 /**
  * A computed property that returns true if the provided dependent property is not equal to the given value
  * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 'a',
+ *  p2: Em.computed.notEqual('p1', 'a')
+ * });
+ * console.log(o.get('p2')); // false
+ * o.set('p1', 'b');
+ * console.log(o.get('p2')); // true
+ * </pre>
  *
  * @method notEqual
  * @param {string} dependentKey
@@ -162,6 +169,16 @@ computed.notEqual = function (dependentKey, value) {
 /**
  * A computed property that returns true if provided dependent properties are equal to the each other
  * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 'a',
+ *  p2: 'b',
+ *  p3: Em.computed.equalProperties('p1', 'p2')
+ * });
+ * console.log(o.get('p3')); // false
+ * o.set('p1', 'b');
+ * console.log(o.get('p3')); // true
+ * </pre>
  *
  * @method equalProperties
  * @param {string} dependentKey1
@@ -177,6 +194,16 @@ computed.equalProperties = function (dependentKey1, dependentKey2) {
 /**
  * A computed property that returns true if provided dependent properties are not equal to the each other
  * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 'a',
+ *  p2: 'b',
+ *  p3: Em.computed.notEqualProperties('p1', 'p2')
+ * });
+ * console.log(o.get('p3')); // true
+ * o.set('p1', 'b');
+ * console.log(o.get('p3')); // false
+ * </pre>
  *
  * @method notEqualProperties
  * @param {string} dependentKey1
@@ -241,6 +268,15 @@ computed.rejectMany = function (collectionKey, propertyName, valuesToReject) {
 /**
  * A computed property that returns trueValue if dependent value is true and falseValue otherwise
  * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: true,
+ *  p2: Em.computed.ifThenElse('p1', 'abc', 'cba')
+ * });
+ * console.log(o.get('p2')); // 'abc'
+ * o.set('p1', false);
+ * console.log(o.get('p2')); // 'cba'
+ * </pre>
  *
  * @method ifThenElse
  * @param {string} dependentKey
@@ -259,6 +295,17 @@ computed.ifThenElse = function (dependentKey, trueValue, falseValue) {
  * Takes any number of arguments
  * Returns true if all of them are truly, false - otherwise
  * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: true,
+ *  p2: true,
+ *  p3: true,
+ *  p4: Em.computed.and('p1', 'p2', 'p3')
+ * });
+ * console.log(o.get('p4')); // true
+ * o.set('p1', false);
+ * console.log(o.get('p4')); // false
+ * </pre>
  *
  * @method and
  * @param {...string} dependentKeys
@@ -280,6 +327,17 @@ computed.and = generateComputedWithProperties(function (properties) {
  * Takes any number of arguments
  * Returns true if at least one of them is truly, false - otherwise
  * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: false,
+ *  p2: false,
+ *  p3: false,
+ *  p4: Em.computed.or('p1', 'p2', 'p3')
+ * });
+ * console.log(o.get('p4')); // false
+ * o.set('p1', true);
+ * console.log(o.get('p4')); // true
+ * </pre>
  *
  * @method or
  * @param {...string} dependentKeys
@@ -300,6 +358,17 @@ computed.or = generateComputedWithProperties(function (properties) {
  * A computed property that returns sum on the dependent properties values
  * Takes any number of arguments
  * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 1,
+ *  p2: 2,
+ *  p3: 3,
+ *  p4: Em.computed.sumProperties('p1', 'p2', 'p3')
+ * });
+ * console.log(o.get('p4')); // 6
+ * o.set('p1', 2);
+ * console.log(o.get('p4')); // 7
+ * </pre>
  *
  * @method sumProperties
  * @param {...string} dependentKeys
@@ -318,6 +387,17 @@ computed.sumProperties = generateComputedWithProperties(function (properties) {
 /**
  * A computed property that returns true if dependent value is greater or equal to the needed value
  * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 4,
+ *  p2: Em.computed.gte('p1', 4)
+ * });
+ * console.log(o.get('p2')); // true
+ * o.set('p1', 5);
+ * console.log(o.get('p2')); // true
+ * o.set('p1', 3);
+ * console.log(o.get('p2')); // false
+ * </pre>
  *
  * @method gte
  * @param {string} dependentKey
@@ -333,6 +413,18 @@ computed.gte = function (dependentKey, value) {
 /**
  * A computed property that returns true if first dependent property is greater or equal to the second dependent property
  * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 4,
+ *  p2: 1,
+ *  p3: Em.computed.gteProperties('p1', 'p2')
+ * });
+ * console.log(o.get('p3')); // true
+ * o.set('p2', 4);
+ * console.log(o.get('p3')); // true
+ * o.set('p2', 5);
+ * console.log(o.get('p3')); // false
+ * </pre>
  *
  * @method gteProperties
  * @param {string} dependentKey1
@@ -348,6 +440,17 @@ computed.gteProperties = function (dependentKey1, dependentKey2) {
 /**
  * A computed property that returns true if dependent property is less or equal to the needed value
  * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 4,
+ *  p2: Em.computed.lte('p1', 4)
+ * });
+ * console.log(o.get('p2')); // true
+ * o.set('p1', 3);
+ * console.log(o.get('p2')); // true
+ * o.set('p1', 5);
+ * console.log(o.get('p2')); // false
+ * </pre>
  *
  * @method lte
  * @param {string} dependentKey
@@ -363,6 +466,18 @@ computed.lte = function (dependentKey, value) {
 /**
  * A computed property that returns true if first dependent property is less or equal to the second dependent property
  * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 4,
+ *  p2: 1,
+ *  p3: Em.computed.lteProperties('p1', 'p2')
+ * });
+ * console.log(o.get('p3')); // false
+ * o.set('p2', 4);
+ * console.log(o.get('p3')); // true
+ * o.set('p2', 5);
+ * console.log(o.get('p3')); // true
+ * </pre>
  *
  * @method lteProperties
  * @param {string} dependentKey1
@@ -378,6 +493,17 @@ computed.lteProperties = function (dependentKey1, dependentKey2) {
 /**
  * A computed property that returns true if dependent value is greater than the needed value
  * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 4,
+ *  p2: Em.computed.gt('p1', 4)
+ * });
+ * console.log(o.get('p2')); // false
+ * o.set('p1', 5);
+ * console.log(o.get('p2')); // true
+ * o.set('p1', 3);
+ * console.log(o.get('p2')); // false
+ * </pre>
  *
  * @method gt
  * @param {string} dependentKey
@@ -393,6 +519,18 @@ computed.gt = function (dependentKey, value) {
 /**
  * A computed property that returns true if first dependent property is greater than the second dependent property
  * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 4,
+ *  p2: 1,
+ *  p3: Em.computed.gtProperties('p1', 'p2')
+ * });
+ * console.log(o.get('p3')); // true
+ * o.set('p2', 4);
+ * console.log(o.get('p3')); // false
+ * o.set('p2', 5);
+ * console.log(o.get('p3')); // false
+ * </pre>
  *
  * @method gtProperties
  * @param {string} dependentKey1
@@ -408,6 +546,17 @@ computed.gtProperties = function (dependentKey1, dependentKey2) {
 /**
  * A computed property that returns true if dependent value is less than the needed value
  * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 4,
+ *  p2: Em.computed.lt('p1', 4)
+ * });
+ * console.log(o.get('p2')); // false
+ * o.set('p1', 3);
+ * console.log(o.get('p2')); // true
+ * o.set('p1', 5);
+ * console.log(o.get('p2')); // false
+ * </pre>
  *
  * @method lt
  * @param {string} dependentKey
@@ -423,6 +572,18 @@ computed.lt = function (dependentKey, value) {
 /**
  * A computed property that returns true if first dependent property is less than the second dependent property
  * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 4,
+ *  p2: 1,
+ *  p3: Em.computed.ltProperties('p1', 'p2')
+ * });
+ * console.log(o.get('p3')); // false
+ * o.set('p2', 4);
+ * console.log(o.get('p3')); // false
+ * o.set('p2', 5);
+ * console.log(o.get('p3')); // true
+ * </pre>
  *
+ * @method ltProperties
  * @param {string} dependentKey1
@@ -437,6 +598,15 @@ computed.ltProperties = function (dependentKey1, dependentKey2) {
 
 /**
  * A computed property that returns true if dependent property is match to the needed regular expression
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 'abc',
+ *  p2: Em.computed.match('p1', /^a/)
+ * });
+ * console.log(o.get('p2')); // true
+ * o.set('p1', 'bc');
+ * console.log(o.get('p2')); // false
+ * </pre>
  *
  * @method match
  * @param {string} dependentKey
@@ -455,6 +625,15 @@ computed.match = function (dependentKey, regexp) {
 
 /**
  * A computed property that returns true of some collection's item has property with needed value
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: [{a: 1}, {a: 2}, {a: 3}],
+ *  p2: Em.computed.someBy('p1', 'a', 1)
+ * });
+ * console.log(o.get('p2')); // true
+ * o.set('p1.0.a', 2);
+ * console.log(o.get('p2')); // false
+ * </pre>
  *
  * @method someBy
  * @param {string} collectionKey
@@ -474,6 +653,15 @@ computed.someBy = function (collectionKey, propertyName, neededValue) {
 
 /**
  * A computed property that returns true of all collection's items have property with needed value
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: [{a: 1}, {a: 1}, {a: 1}],
+ *  p2: Em.computed.everyBy('p1', 'a', 1)
+ * });
+ * console.log(o.get('p2')); // true
+ * o.set('p1.0.a', 2);
+ * console.log(o.get('p2')); // false
+ * </pre>
  *
  * @method everyBy
  * @param {string} collectionKey
@@ -493,6 +681,15 @@ computed.everyBy = function (collectionKey, propertyName, neededValue) {
 
 /**
  * A computed property that returns array with values of named property on all items in the collection
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: [{a: 1}, {a: 2}, {a: 3}],
+ *  p2: Em.computed.mapBy('p1', 'a')
+ * });
+ * console.log(o.get('p2')); // [1, 2, 3]
+ * o.set('p1.0.a', 2);
+ * console.log(o.get('p2')); // [2, 2, 3]
+ * </pre>
  *
  * @method mapBy
  * @param {string} collectionKey
@@ -511,6 +708,15 @@ computed.mapBy = function (collectionKey, propertyName) {
 
 /**
  * A computed property that returns array with collection's items that have needed property value
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: [{a: 1}, {a: 2}, {a: 3}],
+ *  p2: Em.computed.filterBy('p1', 'a', 2)
+ * });
+ * console.log(o.get('p2')); // [{a: 2}]
+ * o.set('p1.0.a', 2);
+ * console.log(o.get('p2')); // [{a: 2}, {a: 2}]
+ * </pre>
  *
  * @method filterBy
  * @param {string} collectionKey
@@ -530,6 +736,15 @@ computed.filterBy = function (collectionKey, propertyName, neededValue) {
 
 /**
  * A computed property that returns first collection's item that has needed property value
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: [{a: 1, b: 1}, {a: 2, b: 2}, {a: 3, b: 3}],
+ *  p2: Em.computed.findBy('p1', 'a', 2)
+ * });
+ * console.log(o.get('p2')); // {a: 2, b: 2}
+ * o.set('p1.0.a', 2);
+ * console.log(o.get('p2')); // {a: 2, b: 1}
+ * </pre>
  *
  * @method findBy
  * @param {string} collectionKey
@@ -551,6 +766,15 @@ computed.findBy = function (collectionKey, propertyName, neededValue) {
  * A computed property that returns value equal to the dependent
  * Should be used as 'short-name' for deeply-nested values
  * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: {a: {b: {c: 2}}},
+ *  p2: Em.computed.alias('p1.a.b.c')
+ * });
+ * console.log(o.get('p2')); // 2
+ * o.set('p1.a.b.c', 4);
+ * console.log(o.get('p2')); // 4
+ * </pre>
  *
  * @method alias
  * @param {string} dependentKey
@@ -564,6 +788,15 @@ computed.alias = function (dependentKey) {
 
 /**
  * A computed property that returns true if dependent property exists in the needed values
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 2,
+ *  p2: Em.computed.existsIn('p1', [1, 2, 3])
+ * });
+ * console.log(o.get('p2')); // true
+ * o.set('p1', 4);
+ * console.log(o.get('p2')); // false
+ * </pre>
  *
  * @method existsIn
  * @param {string} dependentKey
@@ -579,6 +812,15 @@ computed.existsIn = function (dependentKey, neededValues) {
 
 /**
  * A computed property that returns true if dependent property doesn't exist in the needed values
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 2,
+ *  p2: Em.computed.notExistsIn('p1', [1, 2, 3])
+ * });
+ * console.log(o.get('p2')); // false
+ * o.set('p1', 4);
+ * console.log(o.get('p2')); // true
+ * </pre>
  *
  * @method notExistsIn
  * @param {string} dependentKey
@@ -597,6 +839,16 @@ computed.notExistsIn = function (dependentKey, neededValues) {
  * If accuracy is 0 (by default), result is rounded to integer
  * Otherwise - result is float with provided accuracy
  * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 2,
+ *  p2: 4,
+ *  p3: Em.computed.percents('p1', 'p2')
+ * });
+ * console.log(o.get('p3')); // 50
+ * o.set('p2', 5);
+ * console.log(o.get('p3')); // 40
+ * </pre>
  *
  * @method percents
  * @param {string} dependentKey1
@@ -621,6 +873,15 @@ computed.percents = function (dependentKey1, dependentKey2, accuracy) {
 
 /**
  * A computed property that returns result of <code>App.format.role</code> for dependent value
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 'SECONDARY_NAMENODE',
+ *  p2: Em.computed.formatRole('p1')
+ * });
+ * console.log(o.get('p2')); // 'SNameNode'
+ * o.set('p1', 'FLUME_HANDLER');
+ * console.log(o.get('p2')); // 'Flume'
+ * </pre>
  *
  * @method formatRole
  * @param {string} dependentKey
@@ -635,6 +896,15 @@ computed.formatRole = function (dependentKey) {
 
 /**
  * A computed property that returns sum of the named property in the each collection's item
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: [{a: 1}, {a: 2}, {a: 3}],
+ *  p2: Em.computed.sumBy('p1', 'a')
+ * });
+ * console.log(o.get('p2')); // 6
+ * o.set('p1.0.a', 2);
+ * console.log(o.get('p2')); // 7
+ * </pre>
  *
  * @method sumBy
  * @param {string} collectionKey
@@ -674,9 +944,46 @@ computed.i18nFormat = generateComputedWithKey(function (key, dependentValues) {
 });
 
 /**
+ * A computed property that returns string formatted with dependent properties
+ * Takes at least one argument
+ * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 'abc',
+ *  p2: 'cba',
+ *  p3: Em.computed.format('{0} => {1}', 'p1', 'p2')
+ * });
+ * console.log(o.get('p3')); // 'abc => cba'
+ * o.set('p1', 'aaa');
+ * console.log(o.get('p3')); // 'aaa => cba'
+ * </pre>
+ *
+ * @param {string} str string to format
+ * @param {...string} dependentKeys
+ * @method format
+ * @returns {Ember.ComputedProperty}
+ */
+computed.format = generateComputedWithKey(function (str, dependentValues) {
+  if (!str) {
+    return '';
+  }
+  return str.format.apply(str, dependentValues);
+});
+
+/**
  * A computed property that returns dependent values joined with separator
  * Takes at least one argument
  * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 'abc',
+ *  p2: 'cba',
+ *  p3: Em.computed.concat('|', 'p1', 'p2')
+ * });
+ * console.log(o.get('p3')); // 'abc|cba'
+ * o.set('p1', 'aaa');
+ * console.log(o.get('p3')); // 'aaa|cba'
+ * </pre>
  *
  * @param {string} separator
  * @param {...string} dependentKeys
@@ -693,9 +1000,20 @@ computed.concat = generateComputedWithKey(function (separator, dependentValues)
  * Takes at least 1 argument
  * Dependent values order affects the result
  * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: null,
+ *  p2: '',
+ *  p3: 'abc',
+ *  p4: Em.computed.firstNotBlank('p1', 'p2', 'p3')
+ * });
+ * console.log(o.get('p4')); // 'abc'
+ * o.set('p1', 'aaa');
+ * console.log(o.get('p4')); // 'aaa'
+ * </pre>
  *
  * @param {...string} dependentKeys
- * @method {firstNotBlank}
+ * @method firstNotBlank
  * @return {Ember.ComputedProperty}
  */
 computed.firstNotBlank = generateComputedWithValues(function (values) {
@@ -705,4 +1023,69 @@ computed.firstNotBlank = generateComputedWithValues(function (values) {
     }
   }
   return null;
-});
\ No newline at end of file
+});
+
+/**
+ * A computed property that returns the dependent value if it is truthy or equal to 0 ('0' or 0)
+ * Returns <code>'n/a'</code> otherwise
+ * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 0,
+ *  p2: Em.computed.formatUnavailable('p1')
+ * });
+ * console.log(o.get('p2')); // 0
+ * o.set('p1', 12);
+ * console.log(o.get('p2')); // 12
+ * o.set('p1', 'some string');
+ * console.log(o.get('p2')); // 'n/a'
+ * </pre>
+ *
+ * @param {string} dependentKey
+ * @method formatUnavailable
+ * @returns {Ember.ComputedProperty}
+ */
+computed.formatUnavailable = function(dependentKey) {
+  return computed(dependentKey, function () {
+    var value = smartGet(this, dependentKey);
+    return (value || value == 0) ? value : Em.I18n.t('services.service.summary.notAvailable');
+  });
+};
+
+/**
+ * A computed property that returns one of provided values basing on dependent value
+ * If dependent value is 0, <code>zeroMsg</code> is returned
+ * If dependent value is 1, <code>oneMsg</code> is returned
+ * If dependent value is greater than 1, <code>manyMsg</code> is returned
+ * App.*-keys are supported
+ * <pre>
+ * var o = Em.Object.create({
+ *  p1: 0,
+ *  p2: Em.computed.countBasedMessage('p1', '0msg', '1msg', '2+msg')
+ * });
+ * console.log(o.get('p2')); // '0msg'
+ * o.set('p1', 1);
+ * console.log(o.get('p2')); // '1msg'
+ * o.set('p1', 100500);
+ * console.log(o.get('p2')); // '2+msg'
+ * </pre>
+ *
+ * @param {string} dependentKey
+ * @param {string} zeroMsg
+ * @param {string} oneMsg
+ * @param {string} manyMsg
+ * @returns {Ember.ComputedProperty}
+ * @method countBasedMessage
+ */
+computed.countBasedMessage = function (dependentKey, zeroMsg, oneMsg, manyMsg) {
+  return computed(dependentKey, function () {
+    var value = Number(smartGet(this, dependentKey));
+    if (value === 0) {
+      return zeroMsg;
+    }
+    if (value > 1) {
+      return manyMsg;
+    }
+    return oneMsg;
+  });
+};
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd29b081/ambari-web/test/utils/ember_computed_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/utils/ember_computed_test.js b/ambari-web/test/utils/ember_computed_test.js
index 4aa3158..3a22f99 100644
--- a/ambari-web/test/utils/ember_computed_test.js
+++ b/ambari-web/test/utils/ember_computed_test.js
@@ -806,6 +806,10 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop2')).to.be.false;
     });
 
+    it('`prop2` has valid dependent keys', function () {
+      expect(Em.meta(this.obj).descs.prop2._dependentKeys).to.eql(['prop1']);
+    });
+
   });
 
   describe('#someBy', function () {
@@ -831,6 +835,10 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop2')).to.be.false;
     });
 
+    it('`prop2` has valid dependent keys', function () {
+      expect(Em.meta(this.obj).descs.prop2._dependentKeys).to.eql(['prop1.@each.a']);
+    });
+
   });
 
   describe('#everyBy', function () {
@@ -856,6 +864,10 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop2')).to.be.false;
     });
 
+    it('`prop2` has valid dependent keys', function () {
+      expect(Em.meta(this.obj).descs.prop2._dependentKeys).to.eql(['prop1.@each.a']);
+    });
+
   });
 
   describe('#mapBy', function () {
@@ -881,6 +893,10 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop2')).to.eql([]);
     });
 
+    it('`prop2` has valid dependent keys', function () {
+      expect(Em.meta(this.obj).descs.prop2._dependentKeys).to.eql(['prop1.@each.a']);
+    });
+
   });
 
   describe('#filterBy', function () {
@@ -906,6 +922,10 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop2')).to.eql([]);
     });
 
+    it('`prop2` has valid dependent keys', function () {
+      expect(Em.meta(this.obj).descs.prop2._dependentKeys).to.eql(['prop1.@each.a']);
+    });
+
   });
 
   describe('#findBy', function () {
@@ -931,6 +951,10 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop2')).to.be.null;
     });
 
+    it('`prop2` has valid dependent keys', function () {
+      expect(Em.meta(this.obj).descs.prop2._dependentKeys).to.eql(['prop1.@each.a']);
+    });
+
   });
 
   describe('#alias', function() {
@@ -1306,4 +1330,140 @@ describe('Ember.computed macros', function () {
 
   });
 
+  describe('#format', function () {
+
+    beforeEach(function () {
+
+      App.setProperties({
+        someAnotherKey: 'some value'
+      });
+
+      this.obj = Em.Object.create({
+        prop1: 'abc',
+        prop2: 'cba',
+        prop3: 'aaa',
+        prop4: Em.computed.format('{0} {1} {2}', 'prop1', 'prop2', 'prop3'),
+        prop5: Em.computed.format(null, 'prop1', 'prop2', 'prop3'),
+        prop6: Em.computed.format('{0} {1} {2}', 'App.someRandomTestingKey', 'prop2', 'prop3')
+      });
+    });
+
+    it('`prop4` check dependent keys', function () {
+      expect(Em.meta(this.obj).descs.prop4._dependentKeys).to.eql(['prop1', 'prop2', 'prop3']);
+    });
+
+    it('should format message', function () {
+      expect(this.obj.get('prop4')).to.equal('abc cba aaa');
+    });
+
+    it('should format message (2)', function () {
+      this.obj.set('prop1', 'aaa');
+      expect(this.obj.get('prop4')).to.equal('aaa cba aaa');
+    });
+
+    it('empty string for not existing i18-key', function () {
+      expect(this.obj.get('prop5')).to.equal('');
+    });
+
+    it('`prop6` depends on App.* key', function () {
+      expect(this.obj.get('prop6')).to.equal('some value cba aaa');
+      App.set('someAnotherKey', '');
+      expect(this.obj.get('prop6')).to.equal(' cba aaa');
+    });
+
+    it('prop6 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop6._dependentKeys).to.eql(['App.someRandomTestingKey', 'prop2', 'prop3']);
+    });
+
+  });
+
+  describe('#formatUnavailable', function () {
+
+    beforeEach(function () {
+      App.setProperties({
+        someAnotherKey: 1
+      });
+
+      this.obj = Em.Object.create({
+        prop1: 1,
+        prop2: Em.computed.formatUnavailable('prop1'),
+        prop3: Em.computed.formatUnavailable('App.someRandomTestingKey')
+      });
+    });
+
+    it('`value` is 1', function () {
+      expect(this.obj.get('prop2')).to.equal(1);
+      expect(this.obj.get('prop3')).to.equal(1);
+    });
+
+    it('`value` is 0', function () {
+      App.set('someAnotherKey', 0);
+      this.obj.set('prop1', 0);
+      expect(this.obj.get('prop2')).to.equal(0);
+      expect(this.obj.get('prop3')).to.equal(0);
+    });
+
+    it('`value` is `0`', function () {
+      App.set('someAnotherKey', '0');
+      this.obj.set('prop1', '0');
+      expect(this.obj.get('prop2')).to.equal('0');
+      expect(this.obj.get('prop3')).to.equal('0');
+    });
+
+    it('`value` is not numeric', function () {
+      App.set('someAnotherKey', null);
+      this.obj.set('prop1', null);
+      expect(this.obj.get('prop2')).to.equal('n/a');
+      expect(this.obj.get('prop3')).to.equal('n/a');
+    });
+
+    it('prop3 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop3._dependentKeys).to.eql(['App.someRandomTestingKey']);
+    });
+
+  });
+
+  describe('#countBasedMessage', function () {
+
+    var msg0 = 'msg0';
+    var msg1 = 'msg1';
+    var msgM = 'msgM';
+
+    beforeEach(function () {
+      App.setProperties({
+        someAnotherKey: 1
+      });
+
+      this.obj = Em.Object.create({
+        prop1: 1,
+        prop2: Em.computed.countBasedMessage('prop1', msg0, msg1, msgM),
+        prop3: Em.computed.countBasedMessage('App.someRandomTestingKey', msg0, msg1, msgM)
+      });
+    });
+
+    it('`value` is 1', function () {
+      expect(this.obj.get('prop2')).to.equal(msg1);
+      expect(this.obj.get('prop3')).to.equal(msg1);
+    });
+
+    it('`value` is 0', function () {
+      App.set('someAnotherKey', 0);
+      this.obj.set('prop1', 0);
+      expect(this.obj.get('prop2')).to.equal(msg0);
+      expect(this.obj.get('prop3')).to.equal(msg0);
+    });
+
+    it('`value` is greater than 1', function () {
+      App.set('someAnotherKey', 3);
+      this.obj.set('prop1', 3);
+      expect(this.obj.get('prop2')).to.equal(msgM);
+      expect(this.obj.get('prop3')).to.equal(msgM);
+    });
+
+    it('prop3 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop3._dependentKeys).to.eql(['App.someRandomTestingKey']);
+    });
+
+  });
+
 });
\ No newline at end of file


[42/50] ambari git commit: AMBARI-14125. DB Admin Test connection from ambari screen does not work for Oracle DB 'SYS' user (aonishuk)

Posted by nc...@apache.org.
AMBARI-14125. DB Admin Test connection from ambari screen does not work for Oracle DB 'SYS' user (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/95a71a68
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/95a71a68
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/95a71a68

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 95a71a68f6f8d9ad3df1a573262f330d65a45111
Parents: 2d8f69e
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Dec 1 13:21:02 2015 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Tue Dec 1 13:21:02 2015 +0200

----------------------------------------------------------------------
 .../src/main/resources/custom_actions/scripts/check_host.py     | 5 ++++-
 ambari-server/src/test/python/custom_actions/TestCheckHost.py   | 2 +-
 2 files changed, 5 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/95a71a68/ambari-server/src/main/resources/custom_actions/scripts/check_host.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/scripts/check_host.py b/ambari-server/src/main/resources/custom_actions/scripts/check_host.py
index 01d79fe..47bceec 100644
--- a/ambari-server/src/main/resources/custom_actions/scripts/check_host.py
+++ b/ambari-server/src/main/resources/custom_actions/scripts/check_host.py
@@ -377,11 +377,14 @@ class CheckHost(Script):
       db_connection_check_structured_output = {"exit_code" : 1, "message": message}
       return db_connection_check_structured_output
 
+    # For Oracle connection as SYS should be as SYSDBA
+    if db_name == DB_ORACLE and user_name.upper() == "SYS":
+      user_name = "SYS AS SYSDBA"
 
     # try to connect to db
     db_connection_check_command = format("{java_exec} -cp {check_db_connection_path}{class_path_delimiter}" \
            "{jdbc_jar_path} -Djava.library.path={java_library_path} org.apache.ambari.server.DBConnectionVerification \"{db_connection_url}\" " \
-           "{user_name} {user_passwd!p} {jdbc_driver_class}")
+           "\"{user_name}\" {user_passwd!p} {jdbc_driver_class}")
 
     if db_name == DB_SQLA:
       db_connection_check_command = "LD_LIBRARY_PATH=$LD_LIBRARY_PATH:{0}{1} {2}".format(agent_cache_dir,

http://git-wip-us.apache.org/repos/asf/ambari/blob/95a71a68/ambari-server/src/test/python/custom_actions/TestCheckHost.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/custom_actions/TestCheckHost.py b/ambari-server/src/test/python/custom_actions/TestCheckHost.py
index 1e45927..ec9c681 100644
--- a/ambari-server/src/test/python/custom_actions/TestCheckHost.py
+++ b/ambari-server/src/test/python/custom_actions/TestCheckHost.py
@@ -142,7 +142,7 @@ class TestCheckHost(TestCase):
                                                                                     'exit_code': 1}})
     self.assertEquals(format_mock.call_args[0][0],'{java_exec} -cp {check_db_connection_path}{class_path_delimiter}'
             '{jdbc_jar_path} -Djava.library.path={java_library_path} org.apache.ambari.server.DBConnectionVerification'
-            ' "{db_connection_url}" {user_name} {user_passwd!p} {jdbc_driver_class}')
+            ' "{db_connection_url}" "{user_name}" {user_passwd!p} {jdbc_driver_class}')
 
     # test, db connection success
     download_file_mock.reset_mock()


[02/50] ambari git commit: AMBARI-14065. Ranger audit to HDFS - Create prerequisite directories in HDFS

Posted by nc...@apache.org.
AMBARI-14065. Ranger audit to HDFS - Create prerequisite directories in HDFS


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6c3cf499
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6c3cf499
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6c3cf499

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 6c3cf4993e520c2bfc60707fa54e1aa0783bd557
Parents: db2ca77
Author: Gautam Borad <ga...@apache.org>
Authored: Thu Nov 26 12:59:46 2015 +0530
Committer: Gautam Borad <ga...@apache.org>
Committed: Fri Nov 27 11:49:12 2015 +0530

----------------------------------------------------------------------
 .../0.96.0.2.0/package/scripts/params_linux.py  |  1 +
 .../package/scripts/setup_ranger_hbase.py       | 27 +++++++++++++++
 .../2.1.0.2.0/package/scripts/hdfs_namenode.py  |  3 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |  1 +
 .../package/scripts/setup_ranger_hdfs.py        | 29 +++++++++++++++-
 .../0.12.0.2.0/package/scripts/params_linux.py  |  1 +
 .../package/scripts/setup_ranger_hive.py        | 19 +++++++++++
 .../KAFKA/0.8.1.2.2/package/scripts/params.py   | 33 +++++++++++++++++-
 .../package/scripts/setup_ranger_kafka.py       | 20 +++++++++++
 .../0.5.0.2.2/package/scripts/params_linux.py   | 28 +++++++++++++++
 .../package/scripts/setup_ranger_knox.py        | 20 +++++++++++
 .../0.9.1.2.1/package/scripts/params_linux.py   | 36 ++++++++++++++++++--
 .../package/scripts/setup_ranger_storm.py       | 20 +++++++++++
 .../2.1.0.2.0/package/scripts/params_linux.py   |  1 +
 .../package/scripts/setup_ranger_yarn.py        | 19 +++++++++++
 .../stacks/HDP/2.3/role_command_order.json      |  6 +++-
 16 files changed, 257 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6c3cf499/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
index 7dee23b..a05abd4 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
@@ -326,6 +326,7 @@ if has_ranger_admin:
 
   ranger_audit_solr_urls = config['configurations']['ranger-admin-site']['ranger.audit.solr.urls']
   xa_audit_db_is_enabled = config['configurations']['ranger-hbase-audit']['xasecure.audit.destination.db'] if xml_configurations_supported else None
+  xa_audit_hdfs_is_enabled = config['configurations']['ranger-hbase-audit']['xasecure.audit.destination.hdfs'] if xml_configurations_supported else None
   ssl_keystore_password = unicode(config['configurations']['ranger-hbase-policymgr-ssl']['xasecure.policymgr.clientssl.keystore.password']) if xml_configurations_supported else None
   ssl_truststore_password = unicode(config['configurations']['ranger-hbase-policymgr-ssl']['xasecure.policymgr.clientssl.truststore.password']) if xml_configurations_supported else None
   credential_file = format('/etc/ranger/{repo_name}/cred.jceks') if xml_configurations_supported else None

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c3cf499/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/setup_ranger_hbase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/setup_ranger_hbase.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/setup_ranger_hbase.py
index 1d1be6c..5c68583 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/setup_ranger_hbase.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/setup_ranger_hbase.py
@@ -39,6 +39,33 @@ def setup_ranger_hbase(upgrade_type=None):
     else:
       Logger.info("HBase: Setup ranger: command retry not enabled thus skipping if ranger admin is down !")
 
+    if params.xml_configurations_supported and params.enable_ranger_hbase and params.xa_audit_hdfs_is_enabled:
+      params.HdfsResource("/ranger/audit",
+                         type="directory",
+                         action="create_on_execute",
+                         owner=params.hdfs_user,
+                         group=params.hdfs_user,
+                         mode=0755,
+                         recursive_chmod=True
+      )
+      params.HdfsResource("/ranger/audit/hbaseMaster",
+                         type="directory",
+                         action="create_on_execute",
+                         owner=params.hbase_user,
+                         group=params.hbase_user,
+                         mode=0700,
+                         recursive_chmod=True
+      )
+      params.HdfsResource("/ranger/audit/hbaseRegional",
+                         type="directory",
+                         action="create_on_execute",
+                         owner=params.hbase_user,
+                         group=params.hbase_user,
+                         mode=0700,
+                         recursive_chmod=True
+      )
+      params.HdfsResource(None, action="execute")
+
     setup_ranger_plugin('hbase-client', 'hbase', 
                         params.downloaded_custom_connector, params.driver_curl_source,
                         params.driver_curl_target, params.java64_home,

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c3cf499/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
index 44119ab..0902637 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
@@ -38,7 +38,7 @@ from resource_management.core.exceptions import Fail
 from resource_management.core.logger import Logger
 
 from utils import service, safe_zkfc_op, is_previous_fs_image
-from setup_ranger_hdfs import setup_ranger_hdfs
+from setup_ranger_hdfs import setup_ranger_hdfs, create_ranger_audit_hdfs_directories
 
 
 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
@@ -177,6 +177,7 @@ def namenode(action=None, hdfs_binary=None, do_format=True, upgrade_type=None, e
 
     # Always run this on non-HA, or active NameNode during HA.
     create_hdfs_directories(is_active_namenode_cmd)
+    create_ranger_audit_hdfs_directories(is_active_namenode_cmd)
 
   elif action == "stop":
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c3cf499/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
index 587306b..b67a4ae 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
@@ -469,6 +469,7 @@ if has_ranger_admin:
   
   ranger_audit_solr_urls = config['configurations']['ranger-admin-site']['ranger.audit.solr.urls']
   xa_audit_db_is_enabled = config['configurations']['ranger-hdfs-audit']['xasecure.audit.destination.db'] if xml_configurations_supported else None
+  xa_audit_hdfs_is_enabled = config['configurations']['ranger-hdfs-audit']['xasecure.audit.destination.hdfs'] if xml_configurations_supported else None
   ssl_keystore_password = unicode(config['configurations']['ranger-hdfs-policymgr-ssl']['xasecure.policymgr.clientssl.keystore.password']) if xml_configurations_supported else None
   ssl_truststore_password = unicode(config['configurations']['ranger-hdfs-policymgr-ssl']['xasecure.policymgr.clientssl.truststore.password']) if xml_configurations_supported else None
   credential_file = format('/etc/ranger/{repo_name}/cred.jceks') if xml_configurations_supported else None

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c3cf499/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/setup_ranger_hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/setup_ranger_hdfs.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/setup_ranger_hdfs.py
index bd158ec..622dcba 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/setup_ranger_hdfs.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/setup_ranger_hdfs.py
@@ -35,7 +35,7 @@ def setup_ranger_hdfs(upgrade_type=None):
       hdp_version = params.version
 
     if params.retryAble:
-        Logger.info("HDFS: Setup ranger: command retry enables thus retrying if ranger admin is down !")
+      Logger.info("HDFS: Setup ranger: command retry enables thus retrying if ranger admin is down !")
     else:
       Logger.info("HDFS: Setup ranger: command retry not enabled thus skipping if ranger admin is down !")
 
@@ -56,3 +56,30 @@ def setup_ranger_hdfs(upgrade_type=None):
                         hdp_version_override = hdp_version, skip_if_rangeradmin_down= not params.retryAble)
   else:
     Logger.info('Ranger admin not installed')
+
+def create_ranger_audit_hdfs_directories(check):
+  import params
+
+  if params.has_ranger_admin:
+    if params.xml_configurations_supported and params.enable_ranger_hdfs and params.xa_audit_hdfs_is_enabled:
+      params.HdfsResource("/ranger/audit",
+                         type="directory",
+                         action="create_on_execute",
+                         owner=params.hdfs_user,
+                         group=params.hdfs_user,
+                         mode=0755,
+                         recursive_chmod=True,
+                         only_if=check
+      )
+      params.HdfsResource("/ranger/audit/hdfs",
+                         type="directory",
+                         action="create_on_execute",
+                         owner=params.hdfs_user,
+                         group=params.hdfs_user,
+                         mode=0700,
+                         recursive_chmod=True,
+                         only_if=check
+      )
+      params.HdfsResource(None, action="execute", only_if=check)
+  else:
+    Logger.info('Ranger admin not installed')

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c3cf499/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index f360651..a2131b0 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -555,6 +555,7 @@ if has_ranger_admin:
   xa_audit_db_password = unicode(config['configurations']['admin-properties']['audit_db_password'])
   ranger_audit_solr_urls = config['configurations']['ranger-admin-site']['ranger.audit.solr.urls']
   xa_audit_db_is_enabled = config['configurations']['ranger-hive-audit']['xasecure.audit.destination.db'] if xml_configurations_supported else None
+  xa_audit_hdfs_is_enabled = config['configurations']['ranger-hive-audit']['xasecure.audit.destination.hdfs'] if xml_configurations_supported else None
   ssl_keystore_password = unicode(config['configurations']['ranger-hive-policymgr-ssl']['xasecure.policymgr.clientssl.keystore.password']) if xml_configurations_supported else None
   ssl_truststore_password = unicode(config['configurations']['ranger-hive-policymgr-ssl']['xasecure.policymgr.clientssl.truststore.password']) if xml_configurations_supported else None
   credential_file = format('/etc/ranger/{repo_name}/cred.jceks') if xml_configurations_supported else None

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c3cf499/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_ranger_hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_ranger_hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_ranger_hive.py
index c17def0..8b2e4e4 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_ranger_hive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_ranger_hive.py
@@ -39,6 +39,25 @@ def setup_ranger_hive(upgrade_type = None):
     else:
       Logger.info("Hive: Setup ranger: command retry not enabled thus skipping if ranger admin is down !")
 
+    if params.xml_configurations_supported and params.enable_ranger_hive and params.xa_audit_hdfs_is_enabled:
+      params.HdfsResource("/ranger/audit",
+                         type="directory",
+                         action="create_on_execute",
+                         owner=params.hdfs_user,
+                         group=params.hdfs_user,
+                         mode=0755,
+                         recursive_chmod=True
+      )
+      params.HdfsResource("/ranger/audit/hiveServer2",
+                         type="directory",
+                         action="create_on_execute",
+                         owner=params.hive_user,
+                         group=params.hive_user,
+                         mode=0700,
+                         recursive_chmod=True
+      )
+      params.HdfsResource(None, action="execute")
+
     setup_ranger_plugin('hive-server2', 'hive', 
                         params.ranger_downloaded_custom_connector, params.ranger_driver_curl_source,
                         params.ranger_driver_curl_target, params.java64_home,

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c3cf499/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
index bd4fa6c..da76952 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
@@ -24,9 +24,12 @@ from resource_management.libraries.functions.default import default
 from utils import get_bare_principal
 from resource_management.libraries.functions.get_hdp_version import get_hdp_version
 from resource_management.libraries.functions.is_empty import is_empty
-
 import status_params
 from resource_management.core.logger import Logger
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
+from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions import get_kinit_path
 
 
 # server configurations
@@ -232,6 +235,7 @@ if has_ranger_admin and is_supported_kafka_ranger:
 
   ranger_audit_solr_urls = config['configurations']['ranger-admin-site']['ranger.audit.solr.urls']
   xa_audit_db_is_enabled = config['configurations']['ranger-kafka-audit']['xasecure.audit.destination.db'] if xml_configurations_supported else None
+  xa_audit_hdfs_is_enabled = config['configurations']['ranger-kafka-audit']['xasecure.audit.destination.hdfs'] if xml_configurations_supported else None
   ssl_keystore_password = unicode(config['configurations']['ranger-kafka-policymgr-ssl']['xasecure.policymgr.clientssl.keystore.password']) if xml_configurations_supported else None
   ssl_truststore_password = unicode(config['configurations']['ranger-kafka-policymgr-ssl']['xasecure.policymgr.clientssl.truststore.password']) if xml_configurations_supported else None
   credential_file = format('/etc/ranger/{repo_name}/cred.jceks') if xml_configurations_supported else None
@@ -244,3 +248,30 @@ if has_ranger_admin and is_supported_kafka_ranger:
   if xa_audit_db_flavor == 'sqla':
     xa_audit_db_is_enabled = False
 
+namenode_hosts = default("/clusterHostInfo/namenode_host", [])
+has_namenode = not len(namenode_hosts) == 0
+
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user'] if has_namenode else None
+hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab'] if has_namenode else None
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name'] if has_namenode else None
+hdfs_site = config['configurations']['hdfs-site'] if has_namenode else None
+default_fs = config['configurations']['core-site']['fs.defaultFS'] if has_namenode else None
+hadoop_bin_dir = hdp_select.get_hadoop_dir("bin") if has_namenode else None
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+
+import functools
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local,
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir,
+  principal_name = hdfs_principal_name,
+  hdfs_site = hdfs_site,
+  default_fs = default_fs
+)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c3cf499/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/setup_ranger_kafka.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/setup_ranger_kafka.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/setup_ranger_kafka.py
index c210791..a99dc76 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/setup_ranger_kafka.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/setup_ranger_kafka.py
@@ -30,6 +30,26 @@ def setup_ranger_kafka():
     else:
       Logger.info("Kafka: Setup ranger: command retry not enabled thus skipping if ranger admin is down !")
 
+    if params.xml_configurations_supported and params.enable_ranger_kafka and params.xa_audit_hdfs_is_enabled:
+      if params.has_namenode:
+        params.HdfsResource("/ranger/audit",
+                           type="directory",
+                           action="create_on_execute",
+                           owner=params.hdfs_user,
+                           group=params.hdfs_user,
+                           mode=0755,
+                           recursive_chmod=True
+        )
+        params.HdfsResource("/ranger/audit/kafka",
+                           type="directory",
+                           action="create_on_execute",
+                           owner=params.kafka_user,
+                           group=params.kafka_user,
+                           mode=0700,
+                           recursive_chmod=True
+        )
+        params.HdfsResource(None, action="execute")
+
     setup_ranger_plugin('kafka-broker', 'kafka', 
                         params.downloaded_custom_connector, params.driver_curl_source,
                         params.driver_curl_target, params.java64_home,

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c3cf499/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
index c723de9..ec972f6 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
@@ -28,6 +28,9 @@ from resource_management.libraries.functions.get_port_from_url import get_port_f
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
 from status_params import *
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
+from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import conf_select
 
 # server configurations
 config = Script.get_config()
@@ -318,6 +321,7 @@ if has_ranger_admin:
   
   ranger_audit_solr_urls = config['configurations']['ranger-admin-site']['ranger.audit.solr.urls']
   xa_audit_db_is_enabled = config['configurations']['ranger-knox-audit']['xasecure.audit.destination.db'] if xml_configurations_supported else None
+  xa_audit_hdfs_is_enabled = config['configurations']['ranger-knox-audit']['xasecure.audit.destination.hdfs'] if xml_configurations_supported else None
   ssl_keystore_password = unicode(config['configurations']['ranger-knox-policymgr-ssl']['xasecure.policymgr.clientssl.keystore.password']) if xml_configurations_supported else None
   ssl_truststore_password = unicode(config['configurations']['ranger-knox-policymgr-ssl']['xasecure.policymgr.clientssl.truststore.password']) if xml_configurations_supported else None
   credential_file = format('/etc/ranger/{repo_name}/cred.jceks') if xml_configurations_supported else None
@@ -325,3 +329,27 @@ if has_ranger_admin:
   #For SQLA explicitly disable audit to DB for Ranger
   if xa_audit_db_flavor == 'sqla':
     xa_audit_db_is_enabled = False
+
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user'] if has_namenode else None
+hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab'] if has_namenode else None
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name'] if has_namenode else None
+hdfs_site = config['configurations']['hdfs-site'] if has_namenode else None
+default_fs = config['configurations']['core-site']['fs.defaultFS'] if has_namenode else None
+hadoop_bin_dir = hdp_select.get_hadoop_dir("bin") if has_namenode else None
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None
+
+import functools
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local,
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir,
+  principal_name = hdfs_principal_name,
+  hdfs_site = hdfs_site,
+  default_fs = default_fs
+)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c3cf499/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/setup_ranger_knox.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/setup_ranger_knox.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/setup_ranger_knox.py
index 8ea1427..1a08d54 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/setup_ranger_knox.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/setup_ranger_knox.py
@@ -38,6 +38,26 @@ def setup_ranger_knox(upgrade_type=None):
     else:
       Logger.info("Knox: Setup ranger: command retry not enabled thus skipping if ranger admin is down !")
 
+    if params.xml_configurations_supported and params.enable_ranger_knox and params.xa_audit_hdfs_is_enabled:
+      if params.has_namenode:
+        params.HdfsResource("/ranger/audit",
+                           type="directory",
+                           action="create_on_execute",
+                           owner=params.hdfs_user,
+                           group=params.hdfs_user,
+                           mode=0755,
+                           recursive_chmod=True
+        )
+        params.HdfsResource("/ranger/audit/knox",
+                           type="directory",
+                           action="create_on_execute",
+                           owner=params.knox_user,
+                           group=params.knox_user,
+                           mode=0700,
+                           recursive_chmod=True
+        )
+        params.HdfsResource(None, action="execute")
+
     setup_ranger_plugin('knox-server', 'knox',
                         params.downloaded_custom_connector, params.driver_curl_source,
                         params.driver_curl_target, params.java_home,

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c3cf499/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
index f5d944c..f186a89 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
@@ -29,9 +29,10 @@ from resource_management.libraries.functions.version import format_hdp_stack_ver
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.get_bare_principal import get_bare_principal
 from resource_management.libraries.script import Script
-
-
-
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
+from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions import get_kinit_path
 
 # server configurations
 config = Script.get_config()
@@ -260,6 +261,7 @@ if has_ranger_admin:
 
   ranger_audit_solr_urls = config['configurations']['ranger-admin-site']['ranger.audit.solr.urls']
   xa_audit_db_is_enabled = config['configurations']['ranger-storm-audit']['xasecure.audit.destination.db'] if xml_configurations_supported else None
+  xa_audit_hdfs_is_enabled = config['configurations']['ranger-storm-audit']['xasecure.audit.destination.hdfs'] if xml_configurations_supported else None
   ssl_keystore_password = unicode(config['configurations']['ranger-storm-policymgr-ssl']['xasecure.policymgr.clientssl.keystore.password']) if xml_configurations_supported else None
   ssl_truststore_password = unicode(config['configurations']['ranger-storm-policymgr-ssl']['xasecure.policymgr.clientssl.truststore.password']) if xml_configurations_supported else None
   credential_file = format('/etc/ranger/{repo_name}/cred.jceks') if xml_configurations_supported else None
@@ -267,3 +269,31 @@ if has_ranger_admin:
   #For SQLA explicitly disable audit to DB for Ranger
   if xa_audit_db_flavor == 'sqla':
     xa_audit_db_is_enabled = False
+
+namenode_hosts = default("/clusterHostInfo/namenode_host", [])
+has_namenode = not len(namenode_hosts) == 0
+
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user'] if has_namenode else None
+hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab'] if has_namenode else None
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name'] if has_namenode else None
+hdfs_site = config['configurations']['hdfs-site'] if has_namenode else None
+default_fs = config['configurations']['core-site']['fs.defaultFS'] if has_namenode else None
+hadoop_bin_dir = hdp_select.get_hadoop_dir("bin") if has_namenode else None
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+
+import functools
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local,
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir,
+  principal_name = hdfs_principal_name,
+  hdfs_site = hdfs_site,
+  default_fs = default_fs
+)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c3cf499/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/setup_ranger_storm.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/setup_ranger_storm.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/setup_ranger_storm.py
index 037f20a..a76457f 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/setup_ranger_storm.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/setup_ranger_storm.py
@@ -41,6 +41,26 @@ def setup_ranger_storm(upgrade_type=None):
     else:
       Logger.info("Storm: Setup ranger: command retry not enabled thus skipping if ranger admin is down !")
 
+    if params.xml_configurations_supported and params.enable_ranger_storm and params.xa_audit_hdfs_is_enabled:
+      if params.has_namenode:
+        params.HdfsResource("/ranger/audit",
+                           type="directory",
+                           action="create_on_execute",
+                           owner=params.hdfs_user,
+                           group=params.hdfs_user,
+                           mode=0755,
+                           recursive_chmod=True
+        )
+        params.HdfsResource("/ranger/audit/storm",
+                           type="directory",
+                           action="create_on_execute",
+                           owner=params.storm_user,
+                           group=params.storm_user,
+                           mode=0700,
+                           recursive_chmod=True
+        )
+        params.HdfsResource(None, action="execute")
+
     setup_ranger_plugin('storm-nimbus', 'storm',
                         params.downloaded_custom_connector, params.driver_curl_source,
                         params.driver_curl_target, params.java64_home,

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c3cf499/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index d45375f..cb8f77b 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -390,6 +390,7 @@ if has_ranger_admin:
 
     ranger_audit_solr_urls = config['configurations']['ranger-admin-site']['ranger.audit.solr.urls']
     xa_audit_db_is_enabled = config['configurations']['ranger-yarn-audit']['xasecure.audit.destination.db'] if xml_configurations_supported else None
+    xa_audit_hdfs_is_enabled = config['configurations']['ranger-yarn-audit']['xasecure.audit.destination.hdfs'] if xml_configurations_supported else None
     ssl_keystore_password = unicode(config['configurations']['ranger-yarn-policymgr-ssl']['xasecure.policymgr.clientssl.keystore.password']) if xml_configurations_supported else None
     ssl_truststore_password = unicode(config['configurations']['ranger-yarn-policymgr-ssl']['xasecure.policymgr.clientssl.truststore.password']) if xml_configurations_supported else None
     credential_file = format('/etc/ranger/{repo_name}/cred.jceks') if xml_configurations_supported else None

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c3cf499/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/setup_ranger_yarn.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/setup_ranger_yarn.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/setup_ranger_yarn.py
index 5db65d0d..21fe8e1 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/setup_ranger_yarn.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/setup_ranger_yarn.py
@@ -28,6 +28,25 @@ def setup_ranger_yarn():
     else:
       Logger.info("YARN: Setup ranger: command retry not enabled thus skipping if ranger admin is down !")
 
+    if params.xml_configurations_supported and params.enable_ranger_yarn and params.xa_audit_hdfs_is_enabled:
+      params.HdfsResource("/ranger/audit",
+                         type="directory",
+                         action="create_on_execute",
+                         owner=params.hdfs_user,
+                         group=params.hdfs_user,
+                         mode=0755,
+                         recursive_chmod=True
+      )
+      params.HdfsResource("/ranger/audit/yarn",
+                         type="directory",
+                         action="create_on_execute",
+                         owner=params.yarn_user,
+                         group=params.yarn_user,
+                         mode=0700,
+                         recursive_chmod=True
+      )
+      params.HdfsResource(None, action="execute")
+
     setup_ranger_plugin('hadoop-yarn-resourcemanager', 'yarn', 
                         params.downloaded_custom_connector, params.driver_curl_source,
                         params.driver_curl_target, params.java64_home,

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c3cf499/ambari-server/src/main/resources/stacks/HDP/2.3/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/2.3/role_command_order.json
index bfe286b..d634ce1 100755
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/role_command_order.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/role_command_order.json
@@ -11,6 +11,10 @@
     "SPARK_THRIFTSERVER-START" : ["NAMENODE-START", "HIVE_METASTORE-START"],
     "HAWQMASTER-START" : ["NAMENODE-START","DATANODE-START","HAWQSTANDBY-START"],
     "HAWQSEGMENT-START" : ["HAWQMASTER-START","HAWQSTANDBY-START"],
-    "HAWQ_SERVICE_CHECK-SERVICE_CHECK" : ["HAWQMASTER-START"]
+    "HAWQ_SERVICE_CHECK-SERVICE_CHECK" : ["HAWQMASTER-START"],
+    "KNOX_GATEWAY-START" : ["RANGER_USERSYNC-START", "NAMENODE-START"],
+    "KAFKA_BROKER-START" : ["ZOOKEEPER_SERVER-START", "RANGER_USERSYNC-START", "NAMENODE-START"],
+    "NIMBUS-START" : ["ZOOKEEPER_SERVER-START", "RANGER_USERSYNC-START", "NAMENODE-START"],
+    "STORM_UI_SERVER-START" : ["NIMBUS-START", "NAMENODE-START"]
   }
 }


[20/50] ambari git commit: AMBARI-14111 Going back from install, start & test page HBase property "hbase.coprocessor.region.classes" is not retaining value

Posted by nc...@apache.org.
AMBARI-14111 Going back from install, start & test page HBase property "hbase.coprocessor.region.classes" is not retaining value


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1eb64079
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1eb64079
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1eb64079

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 1eb64079548342fb1550eae2cbd3ddabf9398598
Parents: c4d5ff9
Author: Andrii Tkach <at...@hortonworks.com>
Authored: Mon Nov 30 14:24:12 2015 +0200
Committer: Andrii Tkach <at...@hortonworks.com>
Committed: Mon Nov 30 14:24:12 2015 +0200

----------------------------------------------------------------------
 ambari-web/app/mixins/common/serverValidator.js | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1eb64079/ambari-web/app/mixins/common/serverValidator.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/common/serverValidator.js b/ambari-web/app/mixins/common/serverValidator.js
index a1c7eba..7cfc918 100644
--- a/ambari-web/app/mixins/common/serverValidator.js
+++ b/ambari-web/app/mixins/common/serverValidator.js
@@ -121,8 +121,11 @@ App.ServerValidatorMixin = Em.Mixin.create({
    * @returns {*}
    */
   loadServerSideConfigsRecommendations: function() {
-    // if extended controller doesn't support recommendations ignore this call but keep promise chain
-    if (!this.get('isControllerSupportsEnhancedConfigs')) {
+    /**
+     * if extended controller doesn't support recommendations or recommendations has been already loaded
+     * ignore this call but keep promise chain
+     */
+    if (!this.get('isControllerSupportsEnhancedConfigs') || !Em.isNone(this.get('recommendationsConfigs'))) {
       return $.Deferred().resolve().promise();
     }
     var recommendations = this.get('hostGroups');


[23/50] ambari git commit: AMBARI-14079 - Unable to Finalize Upgrade After Removing Host In Maintenance Mode (jonathanhurley)

Posted by nc...@apache.org.
AMBARI-14079 - Unable to Finalize Upgrade After Removing Host In Maintenance Mode (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bbe5e76b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bbe5e76b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bbe5e76b

Branch: refs/heads/branch-dev-patch-upgrade
Commit: bbe5e76bdd2cdb973e5deea836706ba436c47b60
Parents: 61abd86
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Nov 25 18:40:13 2015 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Mon Nov 30 12:32:27 2015 -0500

----------------------------------------------------------------------
 .../ambari/server/orm/entities/HostEntity.java  | 13 ++-
 .../upgrades/FinalizeUpgradeAction.java         |  3 +-
 .../server/state/cluster/ClusterImpl.java       | 16 ++--
 .../server/state/cluster/ClustersImpl.java      | 36 +++++----
 .../upgrades/UpgradeActionTest.java             | 85 +++++++++++++++++++-
 5 files changed, 121 insertions(+), 32 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/bbe5e76b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostEntity.java
index 502c060..2f94e67 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostEntity.java
@@ -278,18 +278,27 @@ public class HostEntity implements Comparable<HostEntity> {
     if (this == o) {
       return true;
     }
+
     if (o == null || getClass() != o.getClass()) {
       return false;
     }
 
     HostEntity that = (HostEntity) o;
 
-    return hostId == that.hostId && hostName.equals(that.hostName);
+    return getHostId() == that.getHostId() && hostName.equals(that.hostName);
   }
 
+  /**
+   * {@inheritDoc}
+   */
   @Override
   public int hashCode() {
-    return (null == hostId ? 0 : hostId.hashCode());
+    int result = null != getHostId() ? getHostId().hashCode() : 0;
+    if (null != hostName) {
+      result = 31 * result + hostName.hashCode();
+    }
+
+    return result;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/ambari/blob/bbe5e76b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
index ba4dadc..9331ef0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
@@ -163,7 +163,8 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
         boolean hostHasCorrectVersionState = false;
         RepositoryVersionState hostVersionState = hostVersion.getState();
         switch( hostVersionState ){
-          case UPGRADED:{
+          case UPGRADED:
+          case CURRENT:{
             // if the state is correct, then do nothing
             hostHasCorrectVersionState = true;
             break;

http://git-wip-us.apache.org/repos/asf/ambari/blob/bbe5e76b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 0f5a717..7ced845 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -37,8 +37,6 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 import javax.persistence.RollbackException;
 
-import com.google.common.base.Predicate;
-import com.google.common.collect.Iterables;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.ConfigGroupNotFoundException;
 import org.apache.ambari.server.DuplicateResourceException;
@@ -101,6 +99,7 @@ import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.HostHealthStatus;
 import org.apache.ambari.server.state.HostState;
 import org.apache.ambari.server.state.MaintenanceState;
+import org.apache.ambari.server.state.PropertyInfo;
 import org.apache.ambari.server.state.RepositoryVersionState;
 import org.apache.ambari.server.state.SecurityType;
 import org.apache.ambari.server.state.Service;
@@ -111,9 +110,8 @@ import org.apache.ambari.server.state.ServiceComponentHostEventType;
 import org.apache.ambari.server.state.ServiceFactory;
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.StackId;
-import org.apache.ambari.server.state.State;
-import org.apache.ambari.server.state.PropertyInfo;
 import org.apache.ambari.server.state.StackInfo;
+import org.apache.ambari.server.state.State;
 import org.apache.ambari.server.state.configgroup.ConfigGroup;
 import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
 import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
@@ -124,7 +122,9 @@ import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Predicate;
 import com.google.common.collect.HashMultimap;
+import com.google.common.collect.Iterables;
 import com.google.common.collect.ListMultimap;
 import com.google.common.collect.Multimap;
 import com.google.common.collect.Sets;
@@ -1669,8 +1669,7 @@ public class ClusterImpl implements Cluster {
 
       if (state == RepositoryVersionState.CURRENT) {
         for (HostEntity hostEntity : clusterEntity.getHostEntities()) {
-          if (hostHasReportables(existingClusterVersion.getRepositoryVersion(),
-              hostEntity)) {
+          if (hostHasReportables(existingClusterVersion.getRepositoryVersion(), hostEntity)) {
             continue;
           }
 
@@ -1827,8 +1826,9 @@ public class ClusterImpl implements Cluster {
         return null;
       }
       for(Map.Entry<String, Config> entry: allConfigs.get(configType).entrySet()) {
-        if(entry.getValue().getVersion().equals(configVersion))
+        if(entry.getValue().getVersion().equals(configVersion)) {
           return entry.getValue();
+        }
       }
       return null;
     } finally {
@@ -3011,7 +3011,7 @@ public class ClusterImpl implements Cluster {
   @Override
   public Map<PropertyInfo.PropertyType, Set<String>> getConfigPropertiesTypes(String configType){
     try {
-      StackId stackId = this.getCurrentStackVersion();
+      StackId stackId = getCurrentStackVersion();
       StackInfo stackInfo = ambariMetaInfo.getStack(stackId.getStackName(), stackId.getStackVersion());
       return stackInfo.getConfigPropertiesTypes(configType);
     } catch (AmbariException e) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/bbe5e76b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java
index e332d23..ca09ab8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java
@@ -18,9 +18,20 @@
 
 package org.apache.ambari.server.state.cluster;
 
-import com.google.inject.Inject;
-import com.google.inject.Singleton;
-import com.google.inject.persist.Transactional;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+import javax.persistence.RollbackException;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.ClusterNotFoundException;
 import org.apache.ambari.server.DuplicateResourceException;
@@ -54,9 +65,9 @@ import org.apache.ambari.server.orm.entities.PrivilegeEntity;
 import org.apache.ambari.server.orm.entities.ResourceEntity;
 import org.apache.ambari.server.orm.entities.ResourceTypeEntity;
 import org.apache.ambari.server.orm.entities.StackEntity;
-import org.apache.ambari.server.security.authorization.ResourceType;
 import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.security.authorization.AmbariGrantedAuthority;
+import org.apache.ambari.server.security.authorization.ResourceType;
 import org.apache.ambari.server.state.AgentVersion;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -74,18 +85,9 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.security.core.GrantedAuthority;
 
-import javax.persistence.RollbackException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
+import com.google.inject.persist.Transactional;
 
 @Singleton
 public class ClustersImpl implements Clusters {
@@ -781,7 +783,7 @@ public class ClustersImpl implements Clusters {
     clusterEntity.getHostEntities().remove(hostEntity);
 
     hostDAO.merge(hostEntity);
-    clusterDAO.merge(clusterEntity);
+    clusterDAO.merge(clusterEntity, true);
   }
 
   @Transactional

http://git-wip-us.apache.org/repos/asf/ambari/blob/bbe5e76b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
index e88c8a7..38370c7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
@@ -49,6 +49,7 @@ import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
 import org.apache.ambari.server.orm.dao.StackDAO;
 import org.apache.ambari.server.orm.entities.ClusterVersionEntity;
 import org.apache.ambari.server.orm.entities.HostVersionEntity;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
 import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -205,6 +206,9 @@ public class UpgradeActionTest {
     host.setHostAttributes(hostAttributes);
     host.persist();
 
+    // without this, HostEntity will not have a relation to ClusterEntity
+    clusters.mapHostsToCluster(Collections.singleton(hostName), clusterName);
+
     // Create the starting repo version
     m_helper.getOrCreateRepositoryVersion(sourceStack, sourceRepo);
     c.createClusterVersion(sourceStack, sourceRepo, "admin", RepositoryVersionState.UPGRADING);
@@ -214,6 +218,7 @@ public class UpgradeActionTest {
     String urlInfo = "[{'repositories':["
         + "{'Repositories/base_url':'http://foo1','Repositories/repo_name':'HDP','Repositories/repo_id':'" + targetStack.getStackId() + "'}"
         + "], 'OperatingSystems/os_type':'redhat6'}]";
+
     repoVersionDAO.create(stackEntityTarget, targetRepo, String.valueOf(System.currentTimeMillis()), urlInfo);
 
     // Start upgrading the newer repo
@@ -226,13 +231,23 @@ public class UpgradeActionTest {
     c.mapHostVersions(Collections.singleton(hostName), c.getCurrentClusterVersion(),
         RepositoryVersionState.CURRENT);
 
+    // create a single host with the UPGRADED HostVersionEntity
     HostDAO hostDAO = m_injector.getInstance(HostDAO.class);
 
-    HostVersionEntity entity = new HostVersionEntity();
-    entity.setHostEntity(hostDAO.findByName(hostName));
-    entity.setRepositoryVersion(repoVersionDAO.findByStackAndVersion(targetStack, targetRepo));
-    entity.setState(RepositoryVersionState.UPGRADED);
+    RepositoryVersionEntity repositoryVersionEntity = repoVersionDAO.findByStackAndVersion(
+        targetStack, targetRepo);
+
+    HostVersionEntity entity = new HostVersionEntity(hostDAO.findByName(hostName),
+        repositoryVersionEntity, RepositoryVersionState.UPGRADED);
+
     hostVersionDAO.create(entity);
+
+    // verify the UPGRADED host versions were created successfully
+    List<HostVersionEntity> hostVersions = hostVersionDAO.findByClusterStackAndVersion(clusterName,
+        targetStack, targetRepo);
+
+    assertEquals(1, hostVersions.size());
+    assertEquals(RepositoryVersionState.UPGRADED, hostVersions.get(0).getState());
   }
 
   private void makeCrossStackUpgradeCluster(StackId sourceStack, String sourceRepo, StackId targetStack, String targetRepo) throws Exception {
@@ -383,6 +398,68 @@ public class UpgradeActionTest {
     verifyBaseRepoURL(helper, cluster, host, "http://foo1");
   }
 
+  /**
+   * Tests that finalize still works when there are hosts which are already
+   * {@link RepositoryVersionState#CURRENT}.
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testFinalizeWithHostsAlreadyCurrent() throws Exception {
+    StackId sourceStack = HDP_21_STACK;
+    StackId targetStack = HDP_21_STACK;
+    String sourceRepo = HDP_2_1_1_0;
+    String targetRepo = HDP_2_1_1_1;
+
+    makeUpgradeCluster(sourceStack, sourceRepo, targetStack, targetRepo);
+
+    // move the old version from CURRENT to INSTALLED and the new version from
+    // UPGRADED to CURRENT - this will simulate what happens when a host is
+    // removed before finalization and all hosts transition to CURRENT
+    List<HostVersionEntity> hostVersions = hostVersionDAO.findAll();
+    for (HostVersionEntity hostVersion : hostVersions) {
+      if (hostVersion.getState() == RepositoryVersionState.CURRENT) {
+        hostVersion.setState(RepositoryVersionState.INSTALLED);
+      } else {
+        hostVersion.setState(RepositoryVersionState.CURRENT);
+      }
+
+      hostVersionDAO.merge(hostVersion);
+    }
+
+    // Verify the repo before calling Finalize
+    AmbariMetaInfo metaInfo = m_injector.getInstance(AmbariMetaInfo.class);
+    AmbariCustomCommandExecutionHelper helper = m_injector.getInstance(AmbariCustomCommandExecutionHelper.class);
+    Clusters clusters = m_injector.getInstance(Clusters.class);
+    Host host = clusters.getHost("h1");
+    Cluster cluster = clusters.getCluster("c1");
+
+    RepositoryInfo repo = metaInfo.getRepository(sourceStack.getStackName(),
+        sourceStack.getStackVersion(), "redhat6", sourceStack.getStackId());
+    assertEquals(HDP_211_CENTOS6_REPO_URL, repo.getBaseUrl());
+    verifyBaseRepoURL(helper, cluster, host, HDP_211_CENTOS6_REPO_URL);
+
+    // Finalize the upgrade
+    Map<String, String> commandParams = new HashMap<String, String>();
+    commandParams.put(FinalizeUpgradeAction.UPGRADE_DIRECTION_KEY, "upgrade");
+    commandParams.put(FinalizeUpgradeAction.VERSION_KEY, targetRepo);
+
+    ExecutionCommand executionCommand = new ExecutionCommand();
+    executionCommand.setCommandParams(commandParams);
+    executionCommand.setClusterName("c1");
+
+    HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
+    hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
+
+    FinalizeUpgradeAction action = m_injector.getInstance(FinalizeUpgradeAction.class);
+    action.setExecutionCommand(executionCommand);
+    action.setHostRoleCommand(hostRoleCommand);
+
+    CommandReport report = action.execute(null);
+    assertNotNull(report);
+    assertEquals(HostRoleStatus.COMPLETED.name(), report.getStatus());
+  }
+
   private void verifyBaseRepoURL(AmbariCustomCommandExecutionHelper helper, Cluster cluster, Host host, String expectedRepoBaseURL) throws AmbariException {
     String repoInfo = helper.getRepoInfo(cluster, host);
     Gson gson = new Gson();


[48/50] ambari git commit: AMBARI-14095. Upgrade: second click shows diff results

Posted by nc...@apache.org.
AMBARI-14095. Upgrade: second click shows diff results


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c5d7cf64
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c5d7cf64
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c5d7cf64

Branch: refs/heads/branch-dev-patch-upgrade
Commit: c5d7cf643fe9fccacadd5b47a52792f87bc7bb73
Parents: 58f6266
Author: Alex Antonenko <hi...@gmail.com>
Authored: Tue Dec 1 15:12:06 2015 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Tue Dec 1 16:02:15 2015 +0200

----------------------------------------------------------------------
 ambari-web/app/assets/test/tests.js             |   1 +
 .../app/controllers/main/admin/kerberos.js      |  16 +-
 .../main/admin/stack_and_upgrade_controller.js  |  40 ++-
 .../modal_popups/cluster_check_dialog.hbs       |  24 +-
 .../common/modal_popups/cluster_check_popup.js  | 107 ++------
 .../admin/stack_and_upgrade_controller_test.js  |   2 +-
 .../modal_popups/cluster_check_popup_test.js    | 271 +++++++++++++++++++
 7 files changed, 356 insertions(+), 105 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c5d7cf64/ambari-web/app/assets/test/tests.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/test/tests.js b/ambari-web/app/assets/test/tests.js
index f8d59f1..23460b7 100644
--- a/ambari-web/app/assets/test/tests.js
+++ b/ambari-web/app/assets/test/tests.js
@@ -202,6 +202,7 @@ var files = [
   'test/views/common/widget/graph_widget_view_test',
   'test/views/common/widget/number_widget_view_test',
   'test/views/common/widget/gauge_widget_view_test',
+  'test/views/common/modal_popups/cluster_check_popup_test',
   'test/views/common/modal_popups/hosts_table_list_popup_test',
   'test/views/common/modal_popups/dependent_configs_list_popup_test',
   'test/views/main/admin_test',

http://git-wip-us.apache.org/repos/asf/ambari/blob/c5d7cf64/ambari-web/app/controllers/main/admin/kerberos.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/kerberos.js b/ambari-web/app/controllers/main/admin/kerberos.js
index 57ee8c1..4e5bcd4 100644
--- a/ambari-web/app/controllers/main/admin/kerberos.js
+++ b/ambari-web/app/controllers/main/admin/kerberos.js
@@ -221,10 +221,18 @@ App.MainAdminKerberosController = App.KerberosWizardStep4Controller.extend({
   runSecurityCheckSuccess: function (data, opt, params) {
     //TODO correct check
     if (data.items.someProperty('UpgradeChecks.status', "FAIL")) {
-      var header = Em.I18n.t('popup.clusterCheck.Security.header').format(params.label);
-      var title = Em.I18n.t('popup.clusterCheck.Security.title');
-      var alert = Em.I18n.t('popup.clusterCheck.Security.alert');
-      App.showClusterCheckPopup(data, header, title, alert);
+      var
+        hasFails = data.items.someProperty('UpgradeChecks.status', 'FAIL'),
+        header = Em.I18n.t('popup.clusterCheck.Security.header').format(params.label),
+        title = Em.I18n.t('popup.clusterCheck.Security.title'),
+        alert = Em.I18n.t('popup.clusterCheck.Security.alert');
+
+      App.showClusterCheckPopup(data, {
+        header: header,
+        failTitle: title,
+        failAlert: alert,
+        noCallbackCondition: hasFails
+      });
     } else {
       this.startKerberosWizard();
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c5d7cf64/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
index f331540..d553fdb 100644
--- a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
+++ b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
@@ -718,9 +718,11 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
             warningTitle = Em.I18n.t('popup.clusterCheck.Upgrade.warning.title'),
             warningAlert = new Em.Handlebars.SafeString(Em.I18n.t('popup.clusterCheck.Upgrade.warning.alert')),
             configsMergeWarning = data.items.findProperty('UpgradeChecks.id', "CONFIG_MERGE"),
+            popupData = {
+              items: data.items.rejectProperty('UpgradeChecks.id', 'CONFIG_MERGE')
+            },
             configs = [];
           if (configsMergeWarning && Em.get(configsMergeWarning, 'UpgradeChecks.status') === 'WARNING') {
-            data.items = data.items.rejectProperty('UpgradeChecks.id', 'CONFIG_MERGE');
             var configsMergeCheckData = Em.get(configsMergeWarning, 'UpgradeChecks.failed_detail');
             if (configsMergeCheckData) {
               configs = configsMergeCheckData.map(function (item) {
@@ -738,12 +740,21 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
               });
             }
           }
-          App.showPreUpgradeCheckPopup(data, header, failTitle, failAlert, warningTitle, warningAlert, function () {
-            self.runPreUpgradeCheckOnly.call(self, {
-              value: version.get('repositoryVersion'),
-              label: version.get('displayName'),
-              type: event.context.get('type')
-            });
+          App.showClusterCheckPopup(popupData, {
+            header: header,
+            failTitle: failTitle,
+            failAlert: failAlert,
+            warningTitle: warningTitle,
+            warningAlert: warningAlert,
+            primary: Em.I18n.t('admin.stackVersions.version.upgrade.upgradeOptions.preCheck.rerun'),
+            secondary: Em.I18n.t('common.cancel'),
+            callback: function () {
+              self.runPreUpgradeCheckOnly.call(self, {
+                value: version.get('repositoryVersion'),
+                label: version.get('displayName'),
+                type: event.context.get('type')
+              });
+            }
           }, configs, version.get('displayName'));
         }
       }),
@@ -958,7 +969,8 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
     var self = this;
     if (data.items.someProperty('UpgradeChecks.status', 'FAIL') || data.items.someProperty('UpgradeChecks.status', 'WARNING')) {
       this.set('requestInProgress', false);
-      var header = Em.I18n.t('popup.clusterCheck.Upgrade.header').format(params.label),
+      var hasFails = data.items.someProperty('UpgradeChecks.status', 'FAIL'),
+        header = Em.I18n.t('popup.clusterCheck.Upgrade.header').format(params.label),
         failTitle = Em.I18n.t('popup.clusterCheck.Upgrade.fail.title'),
         failAlert = new Em.Handlebars.SafeString(Em.I18n.t('popup.clusterCheck.Upgrade.fail.alert')),
         warningTitle = Em.I18n.t('popup.clusterCheck.Upgrade.warning.title'),
@@ -984,8 +996,16 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
           });
         }
       }
-      App.showClusterCheckPopup(data, header, failTitle, failAlert, warningTitle, warningAlert, function () {
-        self.upgrade(params);
+      App.showClusterCheckPopup(data, {
+        header: header,
+        failTitle: failTitle,
+        failAlert: failAlert,
+        warningTitle: warningTitle,
+        warningAlert: warningAlert,
+        noCallbackCondition: hasFails,
+        callback: function () {
+          self.upgrade(params);
+        }
       }, configs, params.label);
     } else {
       this.upgrade(params);

http://git-wip-us.apache.org/repos/asf/ambari/blob/c5d7cf64/ambari-web/app/templates/common/modal_popups/cluster_check_dialog.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/modal_popups/cluster_check_dialog.hbs b/ambari-web/app/templates/common/modal_popups/cluster_check_dialog.hbs
index 8767d70..8fd2746 100644
--- a/ambari-web/app/templates/common/modal_popups/cluster_check_dialog.hbs
+++ b/ambari-web/app/templates/common/modal_popups/cluster_check_dialog.hbs
@@ -20,10 +20,14 @@
     <i class="icon-ok"></i>&nbsp;<span>{{t admin.stackVersions.version.upgrade.upgradeOptions.preCheck.allPassed.msg}}</span>
   {{/if}}
   {{#if view.fails.length}}
-    <h4>{{view.failTitle}}</h4>
-    <div class="alert alert-warning">
-      {{view.failAlert}}
-    </div>
+    {{#if view.failTitle}}
+      <h4>{{view.failTitle}}</h4>
+    {{/if}}
+    {{#if view.failAlert}}
+      <div class="alert alert-warning">
+        {{view.failAlert}}
+      </div>
+    {{/if}}
     <div class="limited-height-2">
       {{#each item in view.fails}}
         <i class="icon-remove"></i>&nbsp;<span>{{item.UpgradeChecks.check}}</span>
@@ -32,10 +36,14 @@
     </div>
   {{/if}}
   {{#if view.warnings.length}}
-    <h4>{{view.warningTitle}}</h4>
-    <div class="alert alert-warning">
-      {{view.warningAlert}}
-    </div>
+    {{#if view.warningTitle}}
+      <h4>{{view.warningTitle}}</h4>
+    {{/if}}
+    {{#if view.warningAlert}}
+      <div class="alert alert-warning">
+        {{view.warningAlert}}
+      </div>
+    {{/if}}
     <div class="limited-height-2">
       {{#each item in view.warnings}}
         <i class="icon-warning-sign"></i>&nbsp;<span>{{item.UpgradeChecks.check}}</span>

http://git-wip-us.apache.org/repos/asf/ambari/blob/c5d7cf64/ambari-web/app/views/common/modal_popups/cluster_check_popup.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/modal_popups/cluster_check_popup.js b/ambari-web/app/views/common/modal_popups/cluster_check_popup.js
index 681c807..54f20ba 100644
--- a/ambari-web/app/views/common/modal_popups/cluster_check_popup.js
+++ b/ambari-web/app/views/common/modal_popups/cluster_check_popup.js
@@ -22,30 +22,33 @@ var App = require('app');
  * popup to display requirements that are not met
  * for current action
  * @param data
- * @param header
- * @param failTitle
- * @param failAlert
- * @param warningTitle
- * @param warningAlert
- * @param callback
+ * @param popup
  * @param configs
  * @param upgradeVersion
  * @returns {*|void}
  */
-App.showClusterCheckPopup = function (data, header, failTitle, failAlert, warningTitle, warningAlert, callback, configs, upgradeVersion) {
+App.showClusterCheckPopup = function (data, popup, configs, upgradeVersion) {
   var fails = data.items.filterProperty('UpgradeChecks.status', 'FAIL'),
     warnings = data.items.filterProperty('UpgradeChecks.status', 'WARNING'),
     hasConfigsMergeConflicts = !!(configs && configs.length),
-    popupBody = {
-      failTitle: failTitle,
-      failAlert: failAlert,
-      warningTitle: warningTitle,
-      warningAlert: warningAlert,
-      templateName: require('templates/common/modal_popups/cluster_check_dialog'),
-      fails: fails,
-      warnings: warnings,
-      hasConfigsMergeConflicts: hasConfigsMergeConflicts
-    };
+    primary,
+    secondary,
+    popupBody;
+  popup = popup || {};
+  primary = Em.isNone(popup.primary) ?
+    (fails.length ? Em.I18n.t('common.dismiss') : Em.I18n.t('common.proceedAnyway')) : popup.primary;
+  secondary = Em.isNone(popup.secondary) ? (fails.length ? false : Em.I18n.t('common.cancel')) : popup.secondary;
+  popupBody = {
+    failTitle: popup.failTitle,
+    failAlert: popup.failAlert,
+    warningTitle: popup.warningTitle,
+    warningAlert: popup.warningAlert,
+    templateName: require('templates/common/modal_popups/cluster_check_dialog'),
+    fails: fails,
+    warnings: warnings,
+    hasConfigsMergeConflicts: hasConfigsMergeConflicts,
+    isAllPassed: !fails.length && !warnings.length && !hasConfigsMergeConflicts
+  };
   if (hasConfigsMergeConflicts) {
     popupBody.configsMergeTable = Em.View.extend({
       templateName: require('templates/main/admin/stack_upgrade/upgrade_configs_merge_table'),
@@ -58,75 +61,15 @@ App.showClusterCheckPopup = function (data, header, failTitle, failAlert, warnin
     });
   }
   return App.ModalPopup.show({
-    primary: fails.length ? Em.I18n.t('common.dismiss') : Em.I18n.t('common.proceedAnyway'),
-    secondary: fails.length ? false : Em.I18n.t('common.cancel'),
-    header: header,
-    classNames: ['cluster-check-popup'],
-    bodyClass: Em.View.extend(popupBody),
-    onPrimary: function () {
-      if (!fails.length && callback) {
-        callback();
-      }
-      this._super();
-    },
-    didInsertElement: function () {
-      this._super();
-      this.fitHeight();
-    }
-  });
-};
-
-
-/**
- * popup to display requirements that are not met
- * for current action
- * @param data
- * @param header
- * @param failTitle
- * @param failAlert
- * @param warningTitle
- * @param warningAlert
- * @param callback
- * @param configs
- * @param upgradeVersion
- * @returns {*|void}
- */
-App.showPreUpgradeCheckPopup = function (data, header, failTitle, failAlert, warningTitle, warningAlert, callback, configs, upgradeVersion) {
-  var fails = data.items.filterProperty('UpgradeChecks.status', 'FAIL'),
-    warnings = data.items.filterProperty('UpgradeChecks.status', 'WARNING'),
-    hasConfigsMergeConflicts = !!(configs && configs.length),
-    popupBody = {
-      failTitle: failTitle,
-      failAlert: failAlert,
-      warningTitle: warningTitle,
-      warningAlert: warningAlert,
-      templateName: require('templates/common/modal_popups/cluster_check_dialog'),
-      fails: fails,
-      warnings: warnings,
-      hasConfigsMergeConflicts: hasConfigsMergeConflicts,
-      isAllPassed: !fails.length && !warnings.length && !hasConfigsMergeConflicts.length
-    };
-  if (hasConfigsMergeConflicts) {
-    popupBody.configsMergeTable = Em.View.extend({
-      templateName: require('templates/main/admin/stack_upgrade/upgrade_configs_merge_table'),
-      configs: configs,
-      didInsertElement: function () {
-        App.tooltip($('.recommended-value'), {
-          title: upgradeVersion
-        });
-      }
-    });
-  }
-  return App.ModalPopup.show({
-    primary: Em.I18n.t('admin.stackVersions.version.upgrade.upgradeOptions.preCheck.rerun'),
-    secondary: Em.I18n.t('common.cancel'),
-    header: header,
+    primary: primary,
+    secondary: secondary,
+    header: popup.header,
     classNames: ['cluster-check-popup'],
     bodyClass: Em.View.extend(popupBody),
     onPrimary: function () {
       this._super();
-      if (callback) {
-        callback();
+      if (!popup.noCallbackCondition && popup.callback) {
+        popup.callback();
       }
     },
     didInsertElement: function () {

http://git-wip-us.apache.org/repos/asf/ambari/blob/c5d7cf64/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js b/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
index 65739cd..b9c7cb7 100644
--- a/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
+++ b/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
@@ -454,7 +454,7 @@ describe('App.MainAdminStackAndUpgradeController', function() {
         expect(controller.upgrade.callCount).to.equal(item.upgradeCalledCount);
         expect(App.showClusterCheckPopup.callCount).to.equal(item.showClusterCheckPopupCalledCount);
         if (item.check.id == 'CONFIG_MERGE') {
-          expect(App.showClusterCheckPopup.firstCall.args[7]).to.eql(item.configs);
+          expect(App.showClusterCheckPopup.firstCall.args[2]).to.eql(item.configs);
         }
       });
     });

http://git-wip-us.apache.org/repos/asf/ambari/blob/c5d7cf64/ambari-web/test/views/common/modal_popups/cluster_check_popup_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/common/modal_popups/cluster_check_popup_test.js b/ambari-web/test/views/common/modal_popups/cluster_check_popup_test.js
new file mode 100644
index 0000000..7f02730
--- /dev/null
+++ b/ambari-web/test/views/common/modal_popups/cluster_check_popup_test.js
@@ -0,0 +1,271 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var App = require('app');
+require('views/common/modal_popups/cluster_check_popup');
+
+describe('App.showClusterCheckPopup', function () {
+
+  var isCallbackExecuted,
+    callback = function () {
+      isCallbackExecuted = true;
+    },
+    cases = [
+      {
+        inputData: {
+          data: {
+            items: [
+              {
+                UpgradeChecks: {
+                  id: 'p0',
+                  status: 'PASS'
+                }
+              },
+              {
+                UpgradeChecks: {
+                  id: 'p1',
+                  status: 'PASS'
+                }
+              }
+            ]
+          }
+        },
+        result: {
+          primary: Em.I18n.t('common.proceedAnyway'),
+          secondary: Em.I18n.t('common.cancel'),
+          header: '&nbsp;'
+        },
+        bodyResult: {
+          failTitle: undefined,
+          failAlert: undefined,
+          warningTitle: undefined,
+          warningAlert: undefined,
+          fails: [],
+          warnings: [],
+          hasConfigsMergeConflicts: false,
+          isAllPassed: true
+        },
+        isCallbackExecuted: false,
+        title: 'no fails, no warnings, no popup customization'
+      },
+      {
+        inputData: {
+          data: {
+            items: [
+              {
+                UpgradeChecks: {
+                  id: 'w0',
+                  status: 'WARNING'
+                }
+              },
+              {
+                UpgradeChecks: {
+                  id: 'w1',
+                  status: 'WARNING'
+                }
+              }
+            ]
+          },
+          popup: {
+            header: 'checks',
+            failTitle: 'fail',
+            failAlert: 'something has failed',
+            warningTitle: 'warning',
+            warningAlert: 'something is not good',
+            callback: callback
+          }
+        },
+        result: {
+          primary: Em.I18n.t('common.proceedAnyway'),
+          secondary: Em.I18n.t('common.cancel'),
+          header: 'checks'
+        },
+        bodyResult: {
+          failTitle: 'fail',
+          failAlert: 'something has failed',
+          warningTitle: 'warning',
+          warningAlert: 'something is not good',
+          fails: [],
+          warnings: [
+            {
+              UpgradeChecks: {
+                id: 'w0',
+                status: 'WARNING'
+              }
+            },
+            {
+              UpgradeChecks: {
+                id: 'w1',
+                status: 'WARNING'
+              }
+            }
+          ],
+          hasConfigsMergeConflicts: false,
+          isAllPassed: false
+        },
+        isCallbackExecuted: true,
+        title: 'no fails, default buttons, callback executed'
+      },
+      {
+        inputData: {
+          data: {
+            items: [
+              {
+                UpgradeChecks: {
+                  id: 'f0',
+                  status: 'FAIL'
+                }
+              },
+              {
+                UpgradeChecks: {
+                  id: 'f1',
+                  status: 'FAIL'
+                }
+              }
+            ]
+          },
+          popup: {
+            callback: callback,
+            noCallbackCondition: true
+          }
+        },
+        result: {
+          primary: Em.I18n.t('common.dismiss'),
+          secondary: false,
+          header: '&nbsp;'
+        },
+        bodyResult: {
+          failTitle: undefined,
+          failAlert: undefined,
+          warningTitle: undefined,
+          warningAlert: undefined,
+          fails: [
+            {
+              UpgradeChecks: {
+                id: 'f0',
+                status: 'FAIL'
+              }
+            },
+            {
+              UpgradeChecks: {
+                id: 'f1',
+                status: 'FAIL'
+              }
+            }
+          ],
+          warnings: [],
+          hasConfigsMergeConflicts: false,
+          isAllPassed: false
+        },
+        isCallbackExecuted: false,
+        title: 'fails detected, default buttons, callback not executed'
+      },
+      {
+        inputData: {
+          data: {
+            items: [
+              {
+                UpgradeChecks: {
+                  id: 'p0',
+                  status: 'PASS'
+                }
+              },
+              {
+                UpgradeChecks: {
+                  id: 'p1',
+                  status: 'PASS'
+                }
+              }
+            ]
+          },
+          popup: {
+            primary: 'ok',
+            secondary: 'cancel'
+          },
+          configs: [
+            {
+              name: 'c0'
+            },
+            {
+              name: 'c1'
+            }
+          ],
+          upgradeVersion: 'HDP-2.3.0.0'
+        },
+        result: {
+          primary: 'ok',
+          secondary: 'cancel',
+          header: '&nbsp;'
+        },
+        bodyResult: {
+          failTitle: undefined,
+          failAlert: undefined,
+          warningTitle: undefined,
+          warningAlert: undefined,
+          fails: [],
+          warnings: [],
+          hasConfigsMergeConflicts: true,
+          isAllPassed: false
+        },
+        configsResult: [
+          {
+            name: 'c0'
+          },
+          {
+            name: 'c1'
+          }
+        ],
+        isCallbackExecuted: false,
+        title: 'configs merge conflicts detected, custom buttons'
+      }
+    ];
+
+  beforeEach(function () {
+    isCallbackExecuted = false;
+    sinon.stub(App, 'tooltip', Em.K);
+  });
+
+  afterEach(function () {
+    App.tooltip.restore();
+  });
+
+  cases.forEach(function (item) {
+    it(item.title, function () {
+      var popup = App.showClusterCheckPopup(item.inputData.data, item.inputData.popup, item.inputData.configs, item.inputData.upgradeVersion),
+        popupBody = popup.bodyClass.create();
+      popup.onPrimary();
+      Em.keys(item.result).forEach(function (key) {
+        expect(popup[key]).to.equal(item.result[key]);
+      });
+      Em.keys(item.bodyResult).forEach(function (key) {
+        expect(popupBody[key]).to.eql(item.bodyResult[key]);
+      });
+      expect(isCallbackExecuted).to.equal(item.isCallbackExecuted);
+      if (item.bodyResult.hasConfigsMergeConflicts) {
+        var configsMergeTable = popupBody.configsMergeTable.create();
+        configsMergeTable.didInsertElement();
+        expect(configsMergeTable.configs).to.eql(item.configsResult);
+        expect(App.tooltip.calledOnce).to.be.true;
+        expect(App.tooltip.firstCall.args[1].title).to.equal(item.inputData.upgradeVersion);
+      } else {
+        expect(App.tooltip.calledOnce).to.be.false;
+      }
+    });
+  });
+
+});


[32/50] ambari git commit: AMBARI-13767. LDAP Group Membership not pulled in with FreeIPA/RHELIDM. (Oliver Szabo via rnettleton)

Posted by nc...@apache.org.
AMBARI-13767. LDAP Group Membership not pulled in with FreeIPA/RHELIDM. (Oliver Szabo via rnettleton)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/006f0fe3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/006f0fe3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/006f0fe3

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 006f0fe3f8aa7f288d77e3192542e8478cc60933
Parents: a62c4b8
Author: Bob Nettleton <rn...@hortonworks.com>
Authored: Mon Nov 30 19:05:29 2015 -0500
Committer: Bob Nettleton <rn...@hortonworks.com>
Committed: Mon Nov 30 19:05:40 2015 -0500

----------------------------------------------------------------------
 .../security/ldap/AmbariLdapDataPopulator.java  | 99 +++++++++++---------
 .../ldap/AmbariLdapDataPopulatorTest.java       | 66 ++++++++-----
 2 files changed, 97 insertions(+), 68 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/006f0fe3/ambari-server/src/main/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulator.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulator.java b/ambari-server/src/main/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulator.java
index 103cfcb..3d2685e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulator.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulator.java
@@ -42,7 +42,6 @@ import org.springframework.ldap.control.PagedResultsDirContextProcessor;
 import org.springframework.ldap.core.AttributesMapper;
 import org.springframework.ldap.core.ContextMapper;
 import org.springframework.ldap.core.DirContextAdapter;
-import org.springframework.ldap.core.DirContextProcessor;
 import org.springframework.ldap.core.LdapTemplate;
 import org.springframework.ldap.core.support.LdapContextSource;
 import org.springframework.ldap.filter.AndFilter;
@@ -176,16 +175,8 @@ public class AmbariLdapDataPopulator {
 
     for (LdapGroupDto groupDto : externalLdapGroupInfo) {
       String groupName = groupDto.getGroupName();
-      if (internalGroupsMap.containsKey(groupName)) {
-        final Group group = internalGroupsMap.get(groupName);
-        if (!group.isLdapGroup()) {
-          batchInfo.getGroupsToBecomeLdap().add(groupName);
-        }
-        internalGroupsMap.remove(groupName);
-      } else {
-        batchInfo.getGroupsToBeCreated().add(groupName);
-      }
-      refreshGroupMembers(batchInfo, groupDto, internalUsersMap, null);
+      addLdapGroup(batchInfo, internalGroupsMap, groupName);
+      refreshGroupMembers(batchInfo, groupDto, internalUsersMap, internalGroupsMap, null);
     }
     for (Entry<String, Group> internalGroup : internalGroupsMap.entrySet()) {
       if (internalGroup.getValue().isLdapGroup()) {
@@ -250,16 +241,8 @@ public class AmbariLdapDataPopulator {
 
     for (LdapGroupDto groupDto : specifiedGroups) {
       String groupName = groupDto.getGroupName();
-      if (internalGroupsMap.containsKey(groupName)) {
-        final Group group = internalGroupsMap.get(groupName);
-        if (!group.isLdapGroup()) {
-          batchInfo.getGroupsToBecomeLdap().add(groupName);
-        }
-        internalGroupsMap.remove(groupName);
-      } else {
-        batchInfo.getGroupsToBeCreated().add(groupName);
-      }
-      refreshGroupMembers(batchInfo, groupDto, internalUsersMap, null);
+      addLdapGroup(batchInfo, internalGroupsMap, groupName);
+      refreshGroupMembers(batchInfo, groupDto, internalUsersMap, internalGroupsMap, null);
     }
 
     return batchInfo;
@@ -317,7 +300,7 @@ public class AmbariLdapDataPopulator {
           batchInfo.getGroupsToBeRemoved().add(group.getGroupName());
         } else {
           LdapGroupDto groupDto = groupDtos.iterator().next();
-          refreshGroupMembers(batchInfo, groupDto, internalUsersMap, null);
+          refreshGroupMembers(batchInfo, groupDto, internalUsersMap, internalGroupsMap, null);
         }
       }
     }
@@ -354,7 +337,8 @@ public class AmbariLdapDataPopulator {
    * @param groupMemberAttributes  set of group member attributes that have already been refreshed
    * @throws AmbariException if group refresh failed
    */
-  protected void refreshGroupMembers(LdapBatchDto batchInfo, LdapGroupDto group, Map<String, User> internalUsers, Set<String> groupMemberAttributes)
+  protected void refreshGroupMembers(LdapBatchDto batchInfo, LdapGroupDto group, Map<String, User> internalUsers,
+                                     Map<String, Group> internalGroupsMap, Set<String> groupMemberAttributes)
       throws AmbariException {
     Set<String> externalMembers = new HashSet<String>();
 
@@ -373,7 +357,8 @@ public class AmbariLdapDataPopulator {
           LdapGroupDto subGroup = getLdapGroupByMemberAttr(memberAttributeValue);
           if (subGroup != null) {
             groupMemberAttributes.add(memberAttributeValue);
-            refreshGroupMembers(batchInfo, subGroup, internalUsers, groupMemberAttributes);
+            addLdapGroup(batchInfo, internalGroupsMap, subGroup.getGroupName());
+            refreshGroupMembers(batchInfo, subGroup, internalUsers, internalGroupsMap, groupMemberAttributes);
           }
         }
       }
@@ -419,7 +404,7 @@ public class AmbariLdapDataPopulator {
     Filter groupObjectFilter = new EqualsFilter(OBJECT_CLASS_ATTRIBUTE,
         ldapServerProperties.getGroupObjectClass());
     Filter groupNameFilter = new LikeFilter(ldapServerProperties.getGroupNamingAttr(), groupName);
-    return getFilteredLdapGroups(groupObjectFilter, groupNameFilter);
+    return getFilteredLdapGroups(ldapServerProperties.getBaseDN(), groupObjectFilter, groupNameFilter);
   }
 
   /**
@@ -432,7 +417,7 @@ public class AmbariLdapDataPopulator {
   protected Set<LdapUserDto> getLdapUsers(String username) {
     Filter userObjectFilter = new EqualsFilter(OBJECT_CLASS_ATTRIBUTE, ldapServerProperties.getUserObjectClass());
     Filter userNameFilter = new LikeFilter(ldapServerProperties.getUsernameAttribute(), username);
-    return getFilteredLdapUsers(userObjectFilter, userNameFilter);
+    return getFilteredLdapUsers(ldapServerProperties.getBaseDN(), userObjectFilter, userNameFilter);
   }
 
   /**
@@ -443,10 +428,16 @@ public class AmbariLdapDataPopulator {
    * @return the user for the given member attribute; null if not found
    */
   protected LdapUserDto getLdapUserByMemberAttr(String memberAttributeValue) {
-    Set<LdapUserDto> filteredLdapUsers = getFilteredLdapUsers(
-        new EqualsFilter(OBJECT_CLASS_ATTRIBUTE, ldapServerProperties.getUserObjectClass()),
-        getMemberFilter(memberAttributeValue));
-
+    Set<LdapUserDto> filteredLdapUsers = new HashSet<LdapUserDto>();
+    if (memberAttributeValue!= null && isMemberAttributeBaseDn(memberAttributeValue)) {
+      Filter filter = new EqualsFilter(OBJECT_CLASS_ATTRIBUTE, ldapServerProperties.getUserObjectClass());
+      filteredLdapUsers = getFilteredLdapUsers(memberAttributeValue, filter);
+    } else {
+      Filter filter = new AndFilter()
+        .and(new EqualsFilter(OBJECT_CLASS_ATTRIBUTE, ldapServerProperties.getUserObjectClass()))
+        .and(new EqualsFilter(ldapServerProperties.getUsernameAttribute(), memberAttributeValue));
+      filteredLdapUsers = getFilteredLdapUsers(ldapServerProperties.getBaseDN(), filter);
+    }
     return (filteredLdapUsers.isEmpty()) ? null : filteredLdapUsers.iterator().next();
   }
 
@@ -458,11 +449,17 @@ public class AmbariLdapDataPopulator {
    * @return the group for the given member attribute; null if not found
    */
   protected LdapGroupDto getLdapGroupByMemberAttr(String memberAttributeValue) {
-    Set<LdapGroupDto> filteredLdapUsers = getFilteredLdapGroups(
+    Set<LdapGroupDto> filteredLdapGroups = new HashSet<LdapGroupDto>();
+    if (memberAttributeValue != null && isMemberAttributeBaseDn(memberAttributeValue)) {
+      Filter filter = new EqualsFilter(OBJECT_CLASS_ATTRIBUTE, ldapServerProperties.getGroupObjectClass());
+      filteredLdapGroups = getFilteredLdapGroups(memberAttributeValue, filter);
+    } else {
+      filteredLdapGroups = getFilteredLdapGroups(ldapServerProperties.getBaseDN(),
         new EqualsFilter(OBJECT_CLASS_ATTRIBUTE, ldapServerProperties.getGroupObjectClass()),
         getMemberFilter(memberAttributeValue));
+    }
 
-    return (filteredLdapUsers.isEmpty()) ? null : filteredLdapUsers.iterator().next();
+    return (filteredLdapGroups.isEmpty()) ? null : filteredLdapGroups.iterator().next();
   }
 
   /**
@@ -481,6 +478,26 @@ public class AmbariLdapDataPopulator {
 
   // Utility methods
 
+  private void addLdapGroup(LdapBatchDto batchInfo, Map<String, Group> internalGroupsMap, String groupName) {
+    if (internalGroupsMap.containsKey(groupName)) {
+      final Group group = internalGroupsMap.get(groupName);
+      if (!group.isLdapGroup()) {
+        batchInfo.getGroupsToBecomeLdap().add(groupName);
+      }
+      internalGroupsMap.remove(groupName);
+    } else {
+      batchInfo.getGroupsToBeCreated().add(groupName);
+    }
+  }
+
+  /**
+   * Determines whether the member attribute can be used as a 'dn'
+   */
+  private boolean isMemberAttributeBaseDn(String memberAttributeValue) {
+    return memberAttributeValue.startsWith(ldapServerProperties.getUsernameAttribute() + "=")
+      || memberAttributeValue.startsWith(ldapServerProperties.getGroupNamingAttr() + "=");
+  }
+
   /**
    * Retrieves groups from external LDAP server.
    *
@@ -489,7 +506,7 @@ public class AmbariLdapDataPopulator {
   protected Set<LdapGroupDto> getExternalLdapGroupInfo() {
     EqualsFilter groupObjectFilter = new EqualsFilter(OBJECT_CLASS_ATTRIBUTE,
         ldapServerProperties.getGroupObjectClass());
-    return getFilteredLdapGroups(groupObjectFilter);
+    return getFilteredLdapGroups(ldapServerProperties.getBaseDN(), groupObjectFilter);
   }
 
   // get a filter based on the given member attribute
@@ -500,18 +517,17 @@ public class AmbariLdapDataPopulator {
             or(new EqualsFilter(UID_ATTRIBUTE, memberAttributeValue));
   }
 
-  private Set<LdapGroupDto> getFilteredLdapGroups(Filter...filters) {
+  private Set<LdapGroupDto> getFilteredLdapGroups(String baseDn, Filter...filters) {
     AndFilter andFilter = new AndFilter();
     for (Filter filter : filters) {
       andFilter.and(filter);
     }
-    return getFilteredLdapGroups(andFilter);
+    return getFilteredLdapGroups(baseDn, andFilter);
   }
 
-  private Set<LdapGroupDto> getFilteredLdapGroups(Filter filter) {
+  private Set<LdapGroupDto> getFilteredLdapGroups(String baseDn, Filter filter) {
     final Set<LdapGroupDto> groups = new HashSet<LdapGroupDto>();
     final LdapTemplate ldapTemplate = loadLdapTemplate();
-    String baseDn = ldapServerProperties.getBaseDN();
     ldapTemplate.search(baseDn, filter.encode(), new LdapGroupContextMapper(groups, ldapServerProperties));
     return groups;
   }
@@ -524,21 +540,20 @@ public class AmbariLdapDataPopulator {
   protected Set<LdapUserDto> getExternalLdapUserInfo() {
     EqualsFilter userObjectFilter = new EqualsFilter(OBJECT_CLASS_ATTRIBUTE,
         ldapServerProperties.getUserObjectClass());
-    return getFilteredLdapUsers(userObjectFilter);
+    return getFilteredLdapUsers(ldapServerProperties.getBaseDN(), userObjectFilter);
   }
 
-  private Set<LdapUserDto> getFilteredLdapUsers(Filter...filters) {
+  private Set<LdapUserDto> getFilteredLdapUsers(String baseDn, Filter...filters) {
     AndFilter andFilter = new AndFilter();
     for (Filter filter : filters) {
       andFilter.and(filter);
     }
-    return getFilteredLdapUsers(andFilter);
+    return getFilteredLdapUsers(baseDn, andFilter);
   }
 
-  private Set<LdapUserDto> getFilteredLdapUsers(Filter filter) {
+  private Set<LdapUserDto> getFilteredLdapUsers(String baseDn, Filter filter) {
     final Set<LdapUserDto> users = new HashSet<LdapUserDto>();
     final LdapTemplate ldapTemplate = loadLdapTemplate();
-    String baseDn = ldapServerProperties.getBaseDN();
     PagedResultsDirContextProcessor processor = createPagingProcessor();
     SearchControls searchControls = new SearchControls();
     searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE);

http://git-wip-us.apache.org/repos/asf/ambari/blob/006f0fe3/ambari-server/src/test/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulatorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulatorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulatorTest.java
index 3f4f7b5..be92871 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulatorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulatorTest.java
@@ -249,7 +249,7 @@ public class AmbariLdapDataPopulatorTest {
     expect(populator.getLdapGroups("group2")).andReturn(Collections.EMPTY_SET);
     LdapGroupDto externalGroup1 = createNiceMock(LdapGroupDto.class);
     LdapBatchDto batchInfo = new LdapBatchDto();
-    populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup1), anyObject(Map.class), anyObject(Set.class));
+    populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup1), anyObject(Map.class), anyObject(Map.class), anyObject(Set.class));
     expectLastCall();
     expect(populator.getLdapGroups("group4")).andReturn(Collections.singleton(externalGroup1));
     expect(populator.getLdapGroups("group5")).andReturn(Collections.EMPTY_SET);
@@ -320,12 +320,12 @@ public class AmbariLdapDataPopulatorTest {
     LdapBatchDto batchInfo = new LdapBatchDto();
     Set<LdapGroupDto> externalGroups = createSet(externalGroup3, externalGroup4);
     for (LdapGroupDto externalGroup : externalGroups) {
-      populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup), anyObject(Map.class), anyObject(Set.class));
+      populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup), anyObject(Map.class), anyObject(Map.class), anyObject(Set.class));
       expectLastCall();
     }
-    populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup1), anyObject(Map.class), anyObject(Set.class));
+    populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup1), anyObject(Map.class), anyObject(Map.class), anyObject(Set.class));
     expectLastCall();
-    populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup2), anyObject(Map.class), anyObject(Set.class));
+    populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup2), anyObject(Map.class), anyObject(Map.class), anyObject(Set.class));
     expectLastCall();
     expect(populator.getLdapGroups("x*")).andReturn(externalGroups);
     expect(populator.getLdapGroups("group1")).andReturn(Collections.singleton(externalGroup1));
@@ -399,10 +399,10 @@ public class AmbariLdapDataPopulatorTest {
     LdapBatchDto batchInfo = new LdapBatchDto();
     Set<LdapGroupDto> externalGroups = createSet(externalGroup3, externalGroup4);
     for (LdapGroupDto externalGroup : externalGroups) {
-      populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup), anyObject(Map.class), anyObject(Set.class));
+      populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup), anyObject(Map.class), anyObject(Map.class), anyObject(Set.class));
       expectLastCall();
     }
-    populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup2), anyObject(Map.class), anyObject(Set.class));
+    populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup2), anyObject(Map.class), anyObject(Map.class), anyObject(Set.class));
     expectLastCall();
     expect(populator.getLdapGroups("x*")).andReturn(externalGroups);
     expect(populator.getLdapGroups("group2")).andReturn(Collections.singleton(externalGroup2));
@@ -473,7 +473,7 @@ public class AmbariLdapDataPopulatorTest {
     LdapBatchDto batchInfo = new LdapBatchDto();
     Set<LdapGroupDto> externalGroups = createSet(externalGroup1, externalGroup2, externalGroup3, externalGroup4);
     for (LdapGroupDto externalGroup : externalGroups) {
-      populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup), anyObject(Map.class), anyObject(Set.class));
+      populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup), anyObject(Map.class), anyObject(Map.class), anyObject(Set.class));
       expectLastCall();
     }
     expect(populator.getLdapGroups("group*")).andReturn(externalGroups);
@@ -603,7 +603,7 @@ public class AmbariLdapDataPopulatorTest {
     LdapBatchDto batchInfo = new LdapBatchDto();
     Set<LdapGroupDto> externalGroups = createSet(externalGroup1, externalGroup2, externalGroup3, externalGroup4);
     for (LdapGroupDto externalGroup : externalGroups) {
-      populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup), anyObject(Map.class), anyObject(Set.class));
+      populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup), anyObject(Map.class), anyObject(Map.class), anyObject(Set.class));
       expectLastCall();
     }
 
@@ -664,7 +664,7 @@ public class AmbariLdapDataPopulatorTest {
     LdapBatchDto batchInfo = new LdapBatchDto();
     Set<LdapGroupDto> externalGroups = createSet(externalGroup1, externalGroup2);
     for (LdapGroupDto externalGroup : externalGroups) {
-      populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup), anyObject(Map.class), anyObject(Set.class));
+      populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup), anyObject(Map.class), anyObject(Map.class), anyObject(Set.class));
       expectLastCall();
     }
     expect(populator.getExternalLdapGroupInfo()).andReturn(externalGroups);
@@ -728,7 +728,7 @@ public class AmbariLdapDataPopulatorTest {
     LdapBatchDto batchInfo = new LdapBatchDto();
     Set<LdapGroupDto> externalGroups = createSet(externalGroup1);
     for (LdapGroupDto externalGroup : externalGroups) {
-      populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup), anyObject(Map.class), anyObject(Set.class));
+      populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup), anyObject(Map.class), anyObject(Map.class), anyObject(Set.class));
       expectLastCall();
     }
     expect(populator.getExternalLdapGroupInfo()).andReturn(externalGroups);
@@ -791,7 +791,7 @@ public class AmbariLdapDataPopulatorTest {
     LdapBatchDto batchInfo = new LdapBatchDto();
     Set<LdapGroupDto> externalGroups = createSet(externalGroup1, externalGroup2);
     for (LdapGroupDto externalGroup : externalGroups) {
-      populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup), anyObject(Map.class), anyObject(Set.class));
+      populator.refreshGroupMembers(eq(batchInfo), eq(externalGroup), anyObject(Map.class), anyObject(Map.class), anyObject(Set.class));
       expectLastCall();
     }
     expect(populator.getExternalLdapGroupInfo()).andReturn(externalGroups);
@@ -1344,19 +1344,28 @@ public class AmbariLdapDataPopulatorTest {
     expect(user3.isLdapUser()).andReturn(true).anyTimes();
     expect(user4.isLdapUser()).andReturn(false).anyTimes();
 
+    Group group1 = createNiceMock(Group.class);
+    Group group2 = createNiceMock(Group.class);
+    expect(group1.isLdapGroup()).andReturn(true).anyTimes();
+    expect(group2.isLdapGroup()).andReturn(true).anyTimes();
+    expect(group1.getGroupName()).andReturn("group1").anyTimes();
+    expect(group2.getGroupName()).andReturn("group2").anyTimes();
     Configuration configuration = createNiceMock(Configuration.class);
     Users users = createNiceMock(Users.class);
     LdapTemplate ldapTemplate = createNiceMock(LdapTemplate.class);
     LdapServerProperties ldapServerProperties = createNiceMock(LdapServerProperties.class);
-
+    expect(ldapServerProperties.getGroupNamingAttr()).andReturn("cn").anyTimes();
+    expect(ldapServerProperties.getUsernameAttribute()).andReturn("uid").anyTimes();
     replay(ldapTemplate, ldapServerProperties, users, configuration);
     replay(user1, user2, user3, user4);
+    replay(group1, group2);
 
     AmbariLdapDataPopulatorTestInstance populator = createMockBuilder(AmbariLdapDataPopulatorTestInstance.class)
-        .addMockedMethod("getLdapUserByMemberAttr")
-        .addMockedMethod("getInternalMembers")
-        .withConstructor(configuration, users)
-        .createNiceMock();
+      .addMockedMethod("getLdapUserByMemberAttr")
+      .addMockedMethod("getLdapGroupByMemberAttr")
+      .addMockedMethod("getInternalMembers")
+      .withConstructor(configuration, users)
+      .createNiceMock();
 
     LdapGroupDto externalGroup = createNiceMock(LdapGroupDto.class);
     expect(externalGroup.getGroupName()).andReturn("group1").anyTimes();
@@ -1380,9 +1389,10 @@ public class AmbariLdapDataPopulatorTest {
     replay(externalUser1, externalUser2, externalUser3, externalUser4);
     expect(populator.getLdapUserByMemberAttr("user1")).andReturn(externalUser1).anyTimes();
     expect(populator.getLdapUserByMemberAttr("user2")).andReturn(externalUser2).anyTimes();
-    expect(populator.getLdapUserByMemberAttr("user4")).andReturn(externalUser3).anyTimes();
+    expect(populator.getLdapUserByMemberAttr("user4")).andReturn(null).anyTimes();
+    expect(populator.getLdapGroupByMemberAttr("user4")).andReturn(externalGroup).anyTimes();
     expect(populator.getLdapUserByMemberAttr("user6")).andReturn(externalUser4).anyTimes();
-    expect(populator.getInternalMembers("group1")).andReturn(internalMembers);
+    expect(populator.getInternalMembers("group1")).andReturn(internalMembers).anyTimes();
     replay(populator);
 
     populator.setLdapTemplate(ldapTemplate);
@@ -1391,29 +1401,31 @@ public class AmbariLdapDataPopulatorTest {
     Map<String, User> internalUsers = new HashMap<String, User>();
     internalUsers.putAll(internalMembers);
     internalUsers.put("user2", user2);
+    Map<String, Group> internalGroups = new HashMap<String, Group>();
+    internalGroups.put("group2", group2);
 
-    populator.refreshGroupMembers(batchInfo, externalGroup, internalUsers, null);
+    populator.refreshGroupMembers(batchInfo, externalGroup, internalUsers, internalGroups, null);
 
     Set<String> groupMembersToAdd = new HashSet<String>();
     for (LdapUserGroupMemberDto ldapUserGroupMemberDto : batchInfo.getMembershipToAdd()) {
       groupMembersToAdd.add(ldapUserGroupMemberDto.getUserName());
     }
-    assertEquals(2, groupMembersToAdd.size());
+    assertEquals(3, groupMembersToAdd.size());
     assertTrue(groupMembersToAdd.contains("user2"));
     assertTrue(groupMembersToAdd.contains("user6"));
     Set<String> groupMembersToRemove = new HashSet<String>();
     for (LdapUserGroupMemberDto ldapUserGroupMemberDto : batchInfo.getMembershipToRemove()) {
       groupMembersToRemove.add(ldapUserGroupMemberDto.getUserName());
     }
-    assertEquals(1, groupMembersToRemove.size());
+    assertEquals(2, groupMembersToRemove.size());
     assertTrue(groupMembersToRemove.contains("user3"));
     assertEquals(1, batchInfo.getUsersToBeCreated().size());
     assertTrue(batchInfo.getUsersToBeCreated().contains("user6"));
-    assertEquals(2, batchInfo.getUsersToBecomeLdap().size());
+    assertEquals(1, batchInfo.getUsersToBecomeLdap().size());
     assertTrue(batchInfo.getUsersToBecomeLdap().contains("user1"));
-    assertTrue(batchInfo.getUsersToBecomeLdap().contains("user4"));
+    assertTrue(!batchInfo.getUsersToBecomeLdap().contains("user4"));
     assertTrue(batchInfo.getGroupsToBecomeLdap().isEmpty());
-    assertTrue(batchInfo.getGroupsToBeCreated().isEmpty());
+    assertEquals(1, batchInfo.getGroupsToBeCreated().size());
     assertTrue(batchInfo.getGroupsToBeRemoved().isEmpty());
     assertTrue(batchInfo.getUsersToBeRemoved().isEmpty());
     verify(populator.loadLdapTemplate(), populator);
@@ -1496,10 +1508,11 @@ public class AmbariLdapDataPopulatorTest {
     expect(ldapServerProperties.getUserObjectClass()).andReturn("objectClass").anyTimes();
     expect(ldapServerProperties.getDnAttribute()).andReturn("dn").anyTimes();
     expect(ldapServerProperties.getBaseDN()).andReturn("baseDN").anyTimes();
+    expect(ldapServerProperties.getUsernameAttribute()).andReturn("uid").anyTimes();
     expect(processor.getCookie()).andReturn(cookie).anyTimes();
     expect(cookie.getCookie()).andReturn(null).anyTimes();
 
-    expect(ldapTemplate.search(eq("baseDN"), eq("(&(objectClass=objectClass)(|(dn=foo)(uid=foo)))"), anyObject(SearchControls.class), capture(contextMapperCapture), eq(processor))).andReturn(list);
+    expect(ldapTemplate.search(eq("baseDN"), eq("(&(objectClass=objectClass)(uid=foo))"), anyObject(SearchControls.class), capture(contextMapperCapture), eq(processor))).andReturn(list);
 
     replay(ldapTemplate, ldapServerProperties, users, configuration, processor, cookie);
 
@@ -1532,10 +1545,11 @@ public class AmbariLdapDataPopulatorTest {
     expect(configuration.getLdapServerProperties()).andReturn(ldapServerProperties).anyTimes();
     expect(ldapServerProperties.isPaginationEnabled()).andReturn(false).anyTimes();
     expect(ldapServerProperties.getUserObjectClass()).andReturn("objectClass").anyTimes();
+    expect(ldapServerProperties.getUsernameAttribute()).andReturn("uid").anyTimes();
     expect(ldapServerProperties.getDnAttribute()).andReturn("dn").anyTimes();
     expect(ldapServerProperties.getBaseDN()).andReturn("baseDN").anyTimes();
 
-    expect(ldapTemplate.search(eq("baseDN"), eq("(&(objectClass=objectClass)(|(dn=foo)(uid=foo)))"), anyObject(SearchControls.class), capture(contextMapperCapture))).andReturn(list);
+    expect(ldapTemplate.search(eq("baseDN"), eq("(&(objectClass=objectClass)(uid=foo))"), anyObject(SearchControls.class), capture(contextMapperCapture))).andReturn(list);
 
     replay(ldapTemplate, ldapServerProperties, users, configuration, processor, cookie);
 


[03/50] ambari git commit: AMBARI-14093. Add ability for some Em.computed macros to work with App.* keys (onechiporenko)

Posted by nc...@apache.org.
AMBARI-14093. Add ability for some Em.computed macros to work with App.* keys (onechiporenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b19bf6ea
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b19bf6ea
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b19bf6ea

Branch: refs/heads/branch-dev-patch-upgrade
Commit: b19bf6eab3821de71c5ffcf8c1f87ea1b338a50b
Parents: 6c3cf49
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Fri Nov 27 11:45:06 2015 +0200
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Fri Nov 27 11:45:06 2015 +0200

----------------------------------------------------------------------
 ambari-web/app/utils/ember_computed.js       | 102 +++--
 ambari-web/test/utils/ember_computed_test.js | 496 +++++++++++++++++++++-
 2 files changed, 552 insertions(+), 46 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b19bf6ea/ambari-web/app/utils/ember_computed.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/ember_computed.js b/ambari-web/app/utils/ember_computed.js
index c996f32..03ac4fc 100644
--- a/ambari-web/app/utils/ember_computed.js
+++ b/ambari-web/app/utils/ember_computed.js
@@ -24,26 +24,57 @@ var slice = [].slice;
 
 var dataUtils = require('utils/data_manipulation');
 
+/**
+ * Returns hash with values of name properties from the context
+ * If <code>propertyName</code> starts with 'App.', <code>App</code> is used as context, <code>self</code> used otherwise
+ * If some <code>propertyName</code> starts with '!' its value will be inverted
+ *
+ * @param {object} self current context
+ * @param {string[]} propertyNames needed properties
+ * @returns {object} hash with needed values
+ */
 function getProperties(self, propertyNames) {
   var ret = {};
   for (var i = 0; i < propertyNames.length; i++) {
     var propertyName = propertyNames[i];
-    var value;
-    if (propertyName.startsWith('!')) {
-      propertyName = propertyName.substring(1);
-      value = !get(self, propertyName);
-    }
-    else {
-      value = get(self, propertyName);
-    }
+    var shouldBeInverted = propertyName.startsWith('!');
+    propertyName = shouldBeInverted ? propertyName.substr(1) : propertyName;
+    var isApp = propertyName.startsWith('App.');
+    var name = isApp ? propertyName.replace('App.', '') : propertyName;
+    var context = isApp ? App : self;
+    var value = get(context, name);
+    value = shouldBeInverted ? !value : value;
     ret[propertyName] = value;
   }
   return ret;
 }
 
+/**
+ * Returns value of named property from the context
+ * If <code>propertyName</code> starts with 'App.', <code>App</code> is used as context, <code>self</code> used otherwise
+ *
+ * @param {object} self current context
+ * @param {string} propertyName needed property
+ * @returns {*} needed value
+ */
+function smartGet(self, propertyName) {
+  var isApp = propertyName.startsWith('App.');
+  var name = isApp ? propertyName.replace('App.', '') : propertyName;
+  var context = isApp ? App : self;
+  return get(context, name)
+}
+
+/**
+ * Returns list with values of name properties from the context
+ * If <code>propertyName</code> starts with 'App.', <code>App</code> is used as context, <code>self</code> used otherwise
+ *
+ * @param {object} self current context
+ * @param {string[]} propertyNames needed properties
+ * @returns {array} list of needed values
+ */
 function getValues(self, propertyNames) {
   return propertyNames.map(function (propertyName) {
-    return get(self, propertyName);
+    return smartGet(self, propertyName);
   });
 }
 
@@ -87,6 +118,7 @@ function generateComputedWithValues(macro) {
  *
  * A computed property that returns true if the provided dependent property
  * is equal to the given value.
+ * App.*-keys are supported
  * Example*
  * ```javascript
  * var Hamster = Ember.Object.extend({
@@ -108,12 +140,13 @@ function generateComputedWithValues(macro) {
  */
 computed.equal = function (dependentKey, value) {
   return computed(dependentKey, function () {
-    return get(this, dependentKey) === value;
+    return smartGet(this, dependentKey) === value;
   }).cacheable();
 };
 
 /**
  * A computed property that returns true if the provided dependent property is not equal to the given value
+ * App.*-keys are supported
  *
  * @method notEqual
  * @param {string} dependentKey
@@ -122,12 +155,13 @@ computed.equal = function (dependentKey, value) {
  */
 computed.notEqual = function (dependentKey, value) {
   return computed(dependentKey, function () {
-    return get(this, dependentKey) !== value;
+    return smartGet(this, dependentKey) !== value;
   });
 };
 
 /**
  * A computed property that returns true if provided dependent properties are equal to the each other
+ * App.*-keys are supported
  *
  * @method equalProperties
  * @param {string} dependentKey1
@@ -136,12 +170,13 @@ computed.notEqual = function (dependentKey, value) {
  */
 computed.equalProperties = function (dependentKey1, dependentKey2) {
   return computed(dependentKey1, dependentKey2, function () {
-    return get(this, dependentKey1) === get(this, dependentKey2);
+    return smartGet(this, dependentKey1) === smartGet(this, dependentKey2);
   });
 };
 
 /**
  * A computed property that returns true if provided dependent properties are not equal to the each other
+ * App.*-keys are supported
  *
  * @method notEqualProperties
  * @param {string} dependentKey1
@@ -150,7 +185,7 @@ computed.equalProperties = function (dependentKey1, dependentKey2) {
  */
 computed.notEqualProperties = function (dependentKey1, dependentKey2) {
   return computed(dependentKey1, dependentKey2, function () {
-    return get(this, dependentKey1) !== get(this, dependentKey2);
+    return smartGet(this, dependentKey1) !== smartGet(this, dependentKey2);
   });
 };
 
@@ -205,6 +240,7 @@ computed.rejectMany = function (collectionKey, propertyName, valuesToReject) {
 
 /**
  * A computed property that returns trueValue if dependent value is true and falseValue otherwise
+ * App.*-keys are supported
  *
  * @method ifThenElse
  * @param {string} dependentKey
@@ -214,7 +250,7 @@ computed.rejectMany = function (collectionKey, propertyName, valuesToReject) {
  */
 computed.ifThenElse = function (dependentKey, trueValue, falseValue) {
   return computed(dependentKey, function () {
-    return get(this, dependentKey) ? trueValue : falseValue;
+    return smartGet(this, dependentKey) ? trueValue : falseValue;
   });
 };
 
@@ -222,6 +258,7 @@ computed.ifThenElse = function (dependentKey, trueValue, falseValue) {
  * A computed property that is equal to the logical 'and'
  * Takes any number of arguments
  * Returns true if all of them are truly, false - otherwise
+ * App.*-keys are supported
  *
  * @method and
  * @param {...string} dependentKeys
@@ -242,6 +279,7 @@ computed.and = generateComputedWithProperties(function (properties) {
  * A computed property that is equal to the logical 'or'
  * Takes any number of arguments
  * Returns true if at least one of them is truly, false - otherwise
+ * App.*-keys are supported
  *
  * @method or
  * @param {...string} dependentKeys
@@ -261,6 +299,7 @@ computed.or = generateComputedWithProperties(function (properties) {
 /**
  * A computed property that returns sum on the dependent properties values
  * Takes any number of arguments
+ * App.*-keys are supported
  *
  * @method sumProperties
  * @param {...string} dependentKeys
@@ -278,6 +317,7 @@ computed.sumProperties = generateComputedWithProperties(function (properties) {
 
 /**
  * A computed property that returns true if dependent value is greater or equal to the needed value
+ * App.*-keys are supported
  *
  * @method gte
  * @param {string} dependentKey
@@ -286,12 +326,13 @@ computed.sumProperties = generateComputedWithProperties(function (properties) {
  */
 computed.gte = function (dependentKey, value) {
   return computed(dependentKey, function () {
-    return get(this, dependentKey) >= value;
+    return smartGet(this, dependentKey) >= value;
   });
 };
 
 /**
  * A computed property that returns true if first dependent property is greater or equal to the second dependent property
+ * App.*-keys are supported
  *
  * @method gteProperties
  * @param {string} dependentKey1
@@ -300,12 +341,13 @@ computed.gte = function (dependentKey, value) {
  */
 computed.gteProperties = function (dependentKey1, dependentKey2) {
   return computed(dependentKey1, dependentKey2, function () {
-    return get(this, dependentKey1) >= get(this, dependentKey2);
+    return smartGet(this, dependentKey1) >= smartGet(this, dependentKey2);
   });
 };
 
 /**
  * A computed property that returns true if dependent property is less or equal to the needed value
+ * App.*-keys are supported
  *
  * @method lte
  * @param {string} dependentKey
@@ -314,12 +356,13 @@ computed.gteProperties = function (dependentKey1, dependentKey2) {
  */
 computed.lte = function (dependentKey, value) {
   return computed(dependentKey, function () {
-    return get(this, dependentKey) <= value;
+    return smartGet(this, dependentKey) <= value;
   });
 };
 
 /**
  * A computed property that returns true if first dependent property is less or equal to the second dependent property
+ * App.*-keys are supported
  *
  * @method lteProperties
  * @param {string} dependentKey1
@@ -328,12 +371,13 @@ computed.lte = function (dependentKey, value) {
  */
 computed.lteProperties = function (dependentKey1, dependentKey2) {
   return computed(dependentKey1, dependentKey2, function () {
-    return get(this, dependentKey1) <= get(this, dependentKey2);
+    return smartGet(this, dependentKey1) <= smartGet(this, dependentKey2);
   });
 };
 
 /**
  * A computed property that returns true if dependent value is greater than the needed value
+ * App.*-keys are supported
  *
  * @method gt
  * @param {string} dependentKey
@@ -342,12 +386,13 @@ computed.lteProperties = function (dependentKey1, dependentKey2) {
  */
 computed.gt = function (dependentKey, value) {
   return computed(dependentKey, function () {
-    return get(this, dependentKey) > value;
+    return smartGet(this, dependentKey) > value;
   });
 };
 
 /**
  * A computed property that returns true if first dependent property is greater than the second dependent property
+ * App.*-keys are supported
  *
  * @method gtProperties
  * @param {string} dependentKey1
@@ -356,12 +401,13 @@ computed.gt = function (dependentKey, value) {
  */
 computed.gtProperties = function (dependentKey1, dependentKey2) {
   return computed(dependentKey1, dependentKey2, function () {
-    return get(this, dependentKey1) > get(this, dependentKey2);
+    return smartGet(this, dependentKey1) > smartGet(this, dependentKey2);
   });
 };
 
 /**
  * A computed property that returns true if dependent value is less than the needed value
+ * App.*-keys are supported
  *
  * @method lt
  * @param {string} dependentKey
@@ -370,12 +416,13 @@ computed.gtProperties = function (dependentKey1, dependentKey2) {
  */
 computed.lt = function (dependentKey, value) {
   return computed(dependentKey, function () {
-    return get(this, dependentKey) < value;
+    return smartGet(this, dependentKey) < value;
   });
 };
 
 /**
  * A computed property that returns true if first dependent property is less than the second dependent property
+ * App.*-keys are supported
  *
  * @method gtProperties
  * @param {string} dependentKey1
@@ -384,7 +431,7 @@ computed.lt = function (dependentKey, value) {
  */
 computed.ltProperties = function (dependentKey1, dependentKey2) {
   return computed(dependentKey1, dependentKey2, function () {
-    return get(this, dependentKey1) < get(this, dependentKey2);
+    return smartGet(this, dependentKey1) < smartGet(this, dependentKey2);
   });
 };
 
@@ -503,6 +550,7 @@ computed.findBy = function (collectionKey, propertyName, neededValue) {
 /**
  * A computed property that returns value equal to the dependent
  * Should be used as 'short-name' for deeply-nested values
+ * App.*-keys are supported
  *
  * @method alias
  * @param {string} dependentKey
@@ -510,7 +558,7 @@ computed.findBy = function (collectionKey, propertyName, neededValue) {
  */
 computed.alias = function (dependentKey) {
   return computed(dependentKey, function () {
-    return get(this, dependentKey);
+    return smartGet(this, dependentKey);
   });
 };
 
@@ -548,6 +596,7 @@ computed.notExistsIn = function (dependentKey, neededValues) {
  * A computed property that returns result of calculation <code>(dependentProperty1/dependentProperty2 * 100)</code>
  * If accuracy is 0 (by default), result is rounded to integer
  * Otherwise - result is float with provided accuracy
+ * App.*-keys are supported
  *
  * @method percents
  * @param {string} dependentKey1
@@ -560,8 +609,8 @@ computed.percents = function (dependentKey1, dependentKey2, accuracy) {
     accuracy = 0;
   }
   return computed(dependentKey1, dependentKey2, function () {
-    var v1 = Number(get(this, dependentKey1));
-    var v2 = Number(get(this, dependentKey2));
+    var v1 = Number(smartGet(this, dependentKey1));
+    var v2 = Number(smartGet(this, dependentKey2));
     var result = v1 / v2 * 100;
     if (0 === accuracy) {
       return Math.round(result);
@@ -609,6 +658,7 @@ computed.sumBy = function (collectionKey, propertyName) {
 /**
  * A computed property that returns I18n-string formatted with dependent properties
  * Takes at least one argument
+ * App.*-keys are supported
  *
  * @param {string} key key in the I18n-messages
  * @param {...string} dependentKeys
@@ -626,6 +676,7 @@ computed.i18nFormat = generateComputedWithKey(function (key, dependentValues) {
 /**
  * A computed property that returns dependent values joined with separator
  * Takes at least one argument
+ * App.*-keys are supported
  *
  * @param {string} separator
  * @param {...string} dependentKeys
@@ -641,6 +692,7 @@ computed.concat = generateComputedWithKey(function (separator, dependentValues)
  * Based on <code>Ember.isBlank</code>
  * Takes at least 1 argument
  * Dependent values order affects the result
+ * App.*-keys are supported
  *
  * @param {...string} dependentKeys
  * @method {firstNotBlank}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b19bf6ea/ambari-web/test/utils/ember_computed_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/utils/ember_computed_test.js b/ambari-web/test/utils/ember_computed_test.js
index 380982c..4aa3158 100644
--- a/ambari-web/test/utils/ember_computed_test.js
+++ b/ambari-web/test/utils/ember_computed_test.js
@@ -20,12 +20,64 @@ require('utils/ember_computed');
 
 describe('Ember.computed macros', function () {
 
+  beforeEach(function () {
+    App.reopen({
+      someRandomTestingKey: function () {
+        return this.get('someAnotherKey');
+      }.property('someAnotherKey'),
+      someAnotherKey: ''
+    });
+  });
+
+  afterEach(function () {
+    delete App.someAnotherKey;
+    delete App.someRandomTestingKey;
+  });
+
+  describe('#equal', function () {
+
+    beforeEach(function () {
+      App.setProperties({
+        someAnotherKey: '123'
+      });
+      this.obj = Em.Object.create({
+        prop1: '123',
+        prop2: Em.computed.equal('prop1', '123'),
+        prop3: Em.computed.equal('App.someRandomTestingKey', '123')
+      });
+    });
+
+    it('`true` if values are equal', function () {
+      expect(this.obj.get('prop2')).to.be.true;
+    });
+
+    it('`false` if values are not equal', function () {
+      this.obj.set('prop1', '321');
+      expect(this.obj.get('prop2')).to.be.false;
+    });
+
+    it('`prop3` depends on App.* key', function () {
+      expect(this.obj.get('prop3')).to.be.true;
+      App.set('someAnotherKey', '');
+      expect(this.obj.get('prop3')).to.be.false;
+    });
+
+    it('prop3 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop3._dependentKeys).to.eql(['App.someRandomTestingKey']);
+    });
+
+  });
+
   describe('#notEqual', function () {
 
     beforeEach(function () {
+      App.setProperties({
+        someAnotherKey: '123'
+      });
       this.obj = Em.Object.create({
         prop1: '123',
-        prop2: Em.computed.notEqual('prop1', '123')
+        prop2: Em.computed.notEqual('prop1', '123'),
+        prop3: Em.computed.notEqual('App.someRandomTestingKey', '123')
       });
     });
 
@@ -38,15 +90,28 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop2')).to.be.true;
     });
 
+    it('`prop3` depends on App.* key', function () {
+      expect(this.obj.get('prop3')).to.be.false;
+      App.set('someAnotherKey', '');
+      expect(this.obj.get('prop3')).to.be.true;
+    });
+
+    it('prop3 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop3._dependentKeys).to.eql(['App.someRandomTestingKey']);
+    });
+
   });
 
   describe('#equalProperties', function () {
 
     beforeEach(function () {
+      App.set('someAnotherKey', '123');
       this.obj = Em.Object.create({
         prop1: '123',
         prop2: '123',
-        prop3: Em.computed.equalProperties('prop1', 'prop2')
+        prop3: Em.computed.equalProperties('prop1', 'prop2'),
+        prop4: Em.computed.equalProperties('App.someRandomTestingKey', 'prop2'),
+        prop5: Em.computed.equalProperties('prop1', 'App.someRandomTestingKey')
       });
     });
 
@@ -59,15 +124,38 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop3')).to.be.false;
     });
 
+    it('prop4 depends on App.* key', function () {
+      expect(this.obj.get('prop4')).to.be.true;
+      App.set('someAnotherKey', '');
+      expect(this.obj.get('prop4')).to.be.false;
+    });
+
+    it('prop5 depends on App.* key', function () {
+      expect(this.obj.get('prop5')).to.be.true;
+      App.set('someAnotherKey', '');
+      expect(this.obj.get('prop5')).to.be.false;
+    });
+
+    it('prop4 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop4._dependentKeys).to.eql(['App.someRandomTestingKey', 'prop2']);
+    });
+
+    it('prop5 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop5._dependentKeys).to.eql(['prop1', 'App.someRandomTestingKey']);
+    });
+
   });
 
   describe('#notEqualProperties', function () {
 
     beforeEach(function () {
+      App.set('someAnotherKey', '123');
       this.obj = Em.Object.create({
         prop1: '123',
         prop2: '123',
-        prop3: Em.computed.notEqualProperties('prop1', 'prop2')
+        prop3: Em.computed.notEqualProperties('prop1', 'prop2'),
+        prop4: Em.computed.notEqualProperties('App.someRandomTestingKey', 'prop2'),
+        prop5: Em.computed.notEqualProperties('prop1', 'App.someRandomTestingKey')
       });
     });
 
@@ -80,14 +168,36 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop3')).to.be.true;
     });
 
+    it('prop4 depends on App.* key', function () {
+      expect(this.obj.get('prop4')).to.be.false;
+      App.set('someAnotherKey', '');
+      expect(this.obj.get('prop4')).to.be.true;
+    });
+
+    it('prop5 depends on App.* key', function () {
+      expect(this.obj.get('prop5')).to.be.false;
+      App.set('someAnotherKey', '');
+      expect(this.obj.get('prop5')).to.be.true;
+    });
+
+    it('prop4 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop4._dependentKeys).to.eql(['App.someRandomTestingKey', 'prop2']);
+    });
+
+    it('prop5 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop5._dependentKeys).to.eql(['prop1', 'App.someRandomTestingKey']);
+    });
+
   });
 
   describe('#ifThenElse', function () {
 
     beforeEach(function () {
+      App.set('someAnotherKey', true);
       this.obj = Em.Object.create({
         prop1: true,
-        prop2: Em.computed.ifThenElse('prop1', '1', '0')
+        prop2: Em.computed.ifThenElse('prop1', '1', '0'),
+        prop3: Em.computed.ifThenElse('App.someRandomTestingKey', '1', '0')
       });
     });
 
@@ -100,17 +210,32 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop2')).to.equal('0');
     });
 
+    it('prop3 depends on App.* key', function () {
+      expect(this.obj.get('prop3')).to.equal('1');
+      App.set('someAnotherKey', false);
+      expect(this.obj.get('prop3')).to.equal('0');
+    });
+
+    it('prop3 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop3._dependentKeys).to.eql(['App.someRandomTestingKey']);
+    });
+
   });
 
   describe('#and', function () {
 
     beforeEach(function () {
+      App.setProperties({
+        someAnotherKey: true
+      });
       this.obj = Em.Object.create({
         prop1: true,
         prop2: true,
         prop3: true,
         prop4: Em.computed.and('prop1', 'prop2', 'prop3'),
-        prop5: Em.computed.and('prop1', '!prop2', '!prop3')
+        prop5: Em.computed.and('prop1', '!prop2', '!prop3'),
+        prop6: Em.computed.and('App.someRandomTestingKey', 'prop1'),
+        prop7: Em.computed.and('!App.someRandomTestingKey', 'prop1')
       });
     });
 
@@ -142,17 +267,45 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop5')).to.be.true;
     });
 
+    it('`prop6` depends on App.* key', function () {
+      expect(this.obj.get('prop6')).to.be.true;
+      App.set('someAnotherKey', false);
+      expect(this.obj.get('prop6')).to.be.false;
+      App.set('someAnotherKey', true);
+      expect(this.obj.get('prop6')).to.be.true;
+    });
+
+    it('prop6 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop6._dependentKeys).to.eql(['App.someRandomTestingKey', 'prop1']);
+    });
+
+    it('`prop7` depends on inverted App.* key', function () {
+      expect(this.obj.get('prop7')).to.be.false;
+      App.set('someAnotherKey', false);
+      expect(this.obj.get('prop7')).to.be.true;
+      App.set('someAnotherKey', true);
+      expect(this.obj.get('prop7')).to.be.false;
+    });
+
+    it('prop7 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop7._dependentKeys).to.eql(['App.someRandomTestingKey', 'prop1']);
+    });
+
   });
 
   describe('#or', function () {
-
     beforeEach(function () {
+      App.setProperties({
+        someAnotherKey: true
+      });
       this.obj = Em.Object.create({
         prop1: false,
         prop2: false,
         prop3: false,
         prop4: Em.computed.or('prop1', 'prop2', 'prop3'),
-        prop5: Em.computed.or('!prop1', '!prop2', '!prop3')
+        prop5: Em.computed.or('!prop1', '!prop2', '!prop3'),
+        prop6: Em.computed.or('App.someRandomTestingKey', 'prop1'),
+        prop7: Em.computed.or('!App.someRandomTestingKey', 'prop1')
       });
     });
 
@@ -185,16 +338,44 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop5')).to.be.false;
     });
 
+    it('`prop6` depends on App.* key', function () {
+      expect(this.obj.get('prop6')).to.be.true;
+      App.set('someAnotherKey', false);
+      expect(this.obj.get('prop6')).to.be.false;
+      App.set('someAnotherKey', true);
+      expect(this.obj.get('prop6')).to.be.true;
+    });
+
+    it('prop6 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop6._dependentKeys).to.eql(['App.someRandomTestingKey', 'prop1']);
+    });
+
+    it('`prop7` depends on inverted App.* key', function () {
+      expect(this.obj.get('prop7')).to.be.false;
+      App.set('someAnotherKey', false);
+      expect(this.obj.get('prop7')).to.be.true;
+      App.set('someAnotherKey', true);
+      expect(this.obj.get('prop7')).to.be.false;
+    });
+
+    it('prop7 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop7._dependentKeys).to.eql(['App.someRandomTestingKey', 'prop1']);
+    });
+
   });
 
   describe('#sumProperties', function () {
 
     beforeEach(function () {
+      App.setProperties({
+        someAnotherKey: 5
+      });
       this.obj = Em.Object.create({
         prop1: 1,
         prop2: 2,
         prop3: 3,
-        prop4: Em.computed.sumProperties('prop1', 'prop2', 'prop3')
+        prop4: Em.computed.sumProperties('prop1', 'prop2', 'prop3'),
+        prop5: Em.computed.sumProperties('prop1', 'prop2', 'App.someRandomTestingKey')
       });
     });
 
@@ -222,14 +403,26 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop4')).to.equal(5);
     });
 
+    it('`prop5` depends on App.* key', function () {
+      expect(this.obj.get('prop5')).to.equal(8);
+      App.set('someAnotherKey', 6);
+      expect(this.obj.get('prop5')).to.equal(9);
+    });
+
+    it('prop5 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop5._dependentKeys).to.eql(['prop1', 'prop2', 'App.someRandomTestingKey']);
+    });
+
   });
 
   describe('#gte', function () {
 
     beforeEach(function () {
+      App.set('someAnotherKey', 4);
       this.obj = Em.Object.create({
         prop1: 2,
-        prop2: Em.computed.gte('prop1', 3)
+        prop2: Em.computed.gte('prop1', 3),
+        prop3: Em.computed.gte('App.someRandomTestingKey', 3)
       });
     });
 
@@ -247,15 +440,30 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop2')).to.be.true;
     });
 
+    it('prop3 depends on App.* key', function () {
+      expect(this.obj.get('prop3')).to.be.true;
+      App.set('someAnotherKey', 3);
+      expect(this.obj.get('prop3')).to.be.true;
+      App.set('someAnotherKey', 2);
+      expect(this.obj.get('prop3')).to.be.false;
+    });
+
+    it('prop3 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop3._dependentKeys).to.eql(['App.someRandomTestingKey']);
+    });
+
   });
 
   describe('#gteProperties', function () {
 
     beforeEach(function () {
+      App.set('someAnotherKey', 4);
       this.obj = Em.Object.create({
         prop1: 2,
         prop2: 3,
-        prop3: Em.computed.gteProperties('prop1', 'prop2')
+        prop3: Em.computed.gteProperties('prop1', 'prop2'),
+        prop4: Em.computed.gteProperties('App.someRandomTestingKey', 'prop2'),
+        prop5: Em.computed.gteProperties('prop1', 'App.someRandomTestingKey')
       });
     });
 
@@ -273,14 +481,40 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop3')).to.be.true;
     });
 
+    it('prop4 depends on App.* key', function () {
+      expect(this.obj.get('prop4')).to.be.true;
+      App.set('someAnotherKey', 3);
+      expect(this.obj.get('prop4')).to.be.true;
+      App.set('someAnotherKey', 2);
+      expect(this.obj.get('prop4')).to.be.false;
+    });
+
+    it('prop5 depends on App.* key', function () {
+      expect(this.obj.get('prop5')).to.be.false;
+      App.set('someAnotherKey', 2);
+      expect(this.obj.get('prop5')).to.be.true;
+      App.set('someAnotherKey', 1);
+      expect(this.obj.get('prop5')).to.be.true;
+    });
+
+    it('prop4 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop4._dependentKeys).to.eql(['App.someRandomTestingKey', 'prop2']);
+    });
+
+    it('prop5 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop5._dependentKeys).to.eql(['prop1', 'App.someRandomTestingKey']);
+    });
+
   });
 
   describe('#lte', function () {
 
     beforeEach(function () {
+      App.set('someAnotherKey', 0);
       this.obj = Em.Object.create({
         prop1: 2,
-        prop2: Em.computed.lte('prop1', 1)
+        prop2: Em.computed.lte('prop1', 1),
+        prop3: Em.computed.lte('App.someRandomTestingKey', 1)
       });
     });
 
@@ -298,15 +532,30 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop2')).to.be.true;
     });
 
+    it('prop3 depends on App.* key', function () {
+      expect(this.obj.get('prop3')).to.be.true;
+      App.set('someAnotherKey', 1);
+      expect(this.obj.get('prop3')).to.be.true;
+      App.set('someAnotherKey', 2);
+      expect(this.obj.get('prop3')).to.be.false;
+    });
+
+    it('prop3 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop3._dependentKeys).to.eql(['App.someRandomTestingKey']);
+    });
+
   });
 
   describe('#lteProperties', function () {
 
     beforeEach(function () {
+      App.set('someAnotherKey', 1);
       this.obj = Em.Object.create({
         prop1: 2,
         prop2: 1,
-        prop3: Em.computed.lteProperties('prop1', 'prop2')
+        prop3: Em.computed.lteProperties('prop1', 'prop2'),
+        prop4: Em.computed.lteProperties('App.someRandomTestingKey', 'prop2'),
+        prop5: Em.computed.lteProperties('prop1', 'App.someRandomTestingKey')
       });
     });
 
@@ -324,14 +573,40 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop3')).to.be.true;
     });
 
+    it('prop4 depends on App.* key', function () {
+      expect(this.obj.get('prop4')).to.be.true;
+      App.set('someAnotherKey', 0);
+      expect(this.obj.get('prop4')).to.be.true;
+      App.set('someAnotherKey', 2);
+      expect(this.obj.get('prop4')).to.be.false;
+    });
+
+    it('prop5 depends on App.* key', function () {
+      expect(this.obj.get('prop5')).to.be.false;
+      App.set('someAnotherKey', 2);
+      expect(this.obj.get('prop5')).to.be.true;
+      App.set('someAnotherKey', 3);
+      expect(this.obj.get('prop5')).to.be.true;
+    });
+
+    it('prop4 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop4._dependentKeys).to.eql(['App.someRandomTestingKey', 'prop2']);
+    });
+
+    it('prop5 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop5._dependentKeys).to.eql(['prop1', 'App.someRandomTestingKey']);
+    });
+
   });
 
   describe('#gt', function () {
 
     beforeEach(function () {
+      App.set('someAnotherKey', 4);
       this.obj = Em.Object.create({
         prop1: 2,
-        prop2: Em.computed.gt('prop1', 3)
+        prop2: Em.computed.gt('prop1', 3),
+        prop3: Em.computed.gt('App.someRandomTestingKey', 3)
       });
     });
 
@@ -349,15 +624,30 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop2')).to.be.true;
     });
 
+    it('prop3 depends on App.* key', function () {
+      expect(this.obj.get('prop3')).to.be.true;
+      App.set('someAnotherKey', 3);
+      expect(this.obj.get('prop3')).to.be.false;
+      App.set('someAnotherKey', 2);
+      expect(this.obj.get('prop3')).to.be.false;
+    });
+
+    it('prop3 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop3._dependentKeys).to.eql(['App.someRandomTestingKey']);
+    });
+
   });
 
   describe('#gtProperties', function () {
 
     beforeEach(function () {
+      App.set('someAnotherKey', 4);
       this.obj = Em.Object.create({
         prop1: 2,
         prop2: 3,
-        prop3: Em.computed.gtProperties('prop1', 'prop2')
+        prop3: Em.computed.gtProperties('prop1', 'prop2'),
+        prop4: Em.computed.gtProperties('App.someRandomTestingKey', 'prop2'),
+        prop5: Em.computed.gtProperties('prop1', 'App.someRandomTestingKey')
       });
     });
 
@@ -375,14 +665,40 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop3')).to.be.true;
     });
 
+    it('prop4 depends on App.* key', function () {
+      expect(this.obj.get('prop4')).to.be.true;
+      App.set('someAnotherKey', 3);
+      expect(this.obj.get('prop4')).to.be.false;
+      App.set('someAnotherKey', 2);
+      expect(this.obj.get('prop4')).to.be.false;
+    });
+
+    it('prop5 depends on App.* key', function () {
+      expect(this.obj.get('prop5')).to.be.false;
+      App.set('someAnotherKey', 2);
+      expect(this.obj.get('prop5')).to.be.false;
+      App.set('someAnotherKey', 1);
+      expect(this.obj.get('prop5')).to.be.true;
+    });
+
+    it('prop4 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop4._dependentKeys).to.eql(['App.someRandomTestingKey', 'prop2']);
+    });
+
+    it('prop5 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop5._dependentKeys).to.eql(['prop1', 'App.someRandomTestingKey']);
+    });
+
   });
 
   describe('#lt', function () {
 
     beforeEach(function () {
+      App.set('someAnotherKey', 0);
       this.obj = Em.Object.create({
         prop1: 2,
-        prop2: Em.computed.lt('prop1', 1)
+        prop2: Em.computed.lt('prop1', 1),
+        prop3: Em.computed.lt('App.someRandomTestingKey', 1)
       });
     });
 
@@ -400,15 +716,30 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop2')).to.be.true;
     });
 
+    it('prop3 depends on App.* key', function () {
+      expect(this.obj.get('prop3')).to.be.true;
+      App.set('someAnotherKey', 1);
+      expect(this.obj.get('prop3')).to.be.false;
+      App.set('someAnotherKey', 2);
+      expect(this.obj.get('prop3')).to.be.false;
+    });
+
+    it('prop3 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop3._dependentKeys).to.eql(['App.someRandomTestingKey']);
+    });
+
   });
 
   describe('#ltProperties', function () {
 
     beforeEach(function () {
+      App.set('someAnotherKey', 1);
       this.obj = Em.Object.create({
         prop1: 2,
         prop2: 1,
-        prop3: Em.computed.ltProperties('prop1', 'prop2')
+        prop3: Em.computed.ltProperties('prop1', 'prop2'),
+        prop4: Em.computed.ltProperties('App.someRandomTestingKey', 'prop2'),
+        prop5: Em.computed.ltProperties('prop1', 'App.someRandomTestingKey')
       });
     });
 
@@ -426,6 +757,30 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop3')).to.be.true;
     });
 
+    it('prop4 depends on App.* key', function () {
+      expect(this.obj.get('prop4')).to.be.false;
+      App.set('someAnotherKey', 0);
+      expect(this.obj.get('prop4')).to.be.true;
+      App.set('someAnotherKey', 2);
+      expect(this.obj.get('prop4')).to.be.false;
+    });
+
+    it('prop5 depends on App.* key', function () {
+      expect(this.obj.get('prop5')).to.be.false;
+      App.set('someAnotherKey', 2);
+      expect(this.obj.get('prop5')).to.be.false;
+      App.set('someAnotherKey', 3);
+      expect(this.obj.get('prop5')).to.be.true;
+    });
+
+    it('prop4 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop4._dependentKeys).to.eql(['App.someRandomTestingKey', 'prop2']);
+    });
+
+    it('prop5 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop5._dependentKeys).to.eql(['prop1', 'App.someRandomTestingKey']);
+    });
+
   });
 
   describe('#match', function () {
@@ -581,6 +936,7 @@ describe('Ember.computed macros', function () {
   describe('#alias', function() {
 
     beforeEach(function () {
+      App.set('someAnotherKey', {a: {b: 1}});
       this.obj = Em.Object.create({
         prop1: {
           a: {
@@ -589,7 +945,8 @@ describe('Ember.computed macros', function () {
             }
           }
         },
-        prop2: Em.computed.alias('prop1.a.b.c')
+        prop2: Em.computed.alias('prop1.a.b.c'),
+        prop3: Em.computed.alias('App.someAnotherKey.a.b')
       })
     });
 
@@ -602,6 +959,16 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop2')).to.equal(2);
     });
 
+    it('prop3 depends on App.* key', function () {
+      expect(this.obj.get('prop3')).to.equal(1);
+      App.set('someAnotherKey.a.b', 4);
+      expect(this.obj.get('prop3')).to.equal(4);
+    });
+
+    it('prop3 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop3._dependentKeys).to.eql(['App.someAnotherKey.a.b']);
+    });
+
   });
 
   describe('#existsIn', function () {
@@ -632,14 +999,24 @@ describe('Ember.computed macros', function () {
   describe('#percents', function () {
 
     beforeEach(function () {
+      App.setProperties({
+        p1: 25,
+        p2: 100
+      });
       this.obj = Em.Object.create({
         prop1: 10,
         prop2: 25,
         prop3: Em.computed.percents('prop1', 'prop2'),
-        prop4: Em.computed.percents('prop1', 'prop2', 2)
+        prop4: Em.computed.percents('prop1', 'prop2', 2),
+        prop5: Em.computed.percents('App.p1', 'App.p2', 1)
       });
     });
 
+    afterEach(function () {
+      delete App.p1;
+      delete App.p2;
+    });
+
     it('should calculate percents', function () {
       expect(this.obj.get('prop3')).to.equal(40);
       expect(this.obj.get('prop4')).to.equal(40.00);
@@ -671,6 +1048,18 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop4')).to.equal(10.60);
     });
 
+    it('prop5 depends on App.* keys', function () {
+      expect(this.obj.get('prop5')).to.equal(25.0);
+      App.set('p2', 50);
+      expect(this.obj.get('prop5')).to.equal(50.0);
+      App.set('p1', 10);
+      expect(this.obj.get('prop5')).to.equal(20.0);
+    });
+
+    it('prop5 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop5._dependentKeys).to.eql(['App.p1', 'App.p2']);
+    });
+
   });
 
   describe('#formatRole', function () {
@@ -730,11 +1119,21 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop2')).to.equal(10);
     });
 
+    it('0 for empty collection', function () {
+      this.obj.set('prop1', []);
+      expect(this.obj.get('prop2')).to.equal(0);
+    });
+
   });
 
   describe('#i18nFormat', function () {
 
     beforeEach(function () {
+
+      App.setProperties({
+        someAnotherKey: 'some value'
+      });
+
       sinon.stub(Em.I18n, 't', function (key) {
         var msgs = {
           key1: '{0} {1} {2}'
@@ -746,7 +1145,8 @@ describe('Ember.computed macros', function () {
         prop2: 'cba',
         prop3: 'aaa',
         prop4: Em.computed.i18nFormat('key1', 'prop1', 'prop2', 'prop3'),
-        prop5: Em.computed.i18nFormat('not_existing_key', 'prop1', 'prop2', 'prop3')
+        prop5: Em.computed.i18nFormat('not_existing_key', 'prop1', 'prop2', 'prop3'),
+        prop6: Em.computed.i18nFormat('key1', 'App.someRandomTestingKey', 'prop2', 'prop3')
       });
     });
 
@@ -771,16 +1171,33 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop5')).to.equal('');
     });
 
+    it('`prop6` depends on App.* key', function () {
+      expect(this.obj.get('prop6')).to.equal('some value cba aaa');
+      App.set('someAnotherKey', '');
+      expect(this.obj.get('prop6')).to.equal(' cba aaa');
+    });
+
+    it('prop6 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop6._dependentKeys).to.eql(['App.someRandomTestingKey', 'prop2', 'prop3']);
+    });
+
   });
 
   describe('#concat', function () {
 
     beforeEach(function () {
+
+      App.setProperties({
+        someAnotherKey: 'some value'
+      });
+
       this.obj = Em.Object.create({
         prop1: 'abc',
         prop2: 'cba',
         prop3: 'aaa',
-        prop4: Em.computed.concat(' ', 'prop1', 'prop2', 'prop3')
+        prop4: Em.computed.concat(' ', 'prop1', 'prop2', 'prop3'),
+        prop5: Em.computed.concat(' ', 'App.someRandomTestingKey', 'prop2', 'prop3'),
+        prop6: Em.computed.concat(' ')
       });
     });
 
@@ -793,6 +1210,20 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop4')).to.equal('aaa cba aaa');
     });
 
+    it('`prop5` depends on App.* key', function () {
+      expect(this.obj.get('prop5')).to.equal('some value cba aaa');
+      App.set('someAnotherKey', '');
+      expect(this.obj.get('prop5')).to.equal(' cba aaa');
+    });
+
+    it('prop5 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop5._dependentKeys).to.eql(['App.someRandomTestingKey', 'prop2', 'prop3']);
+    });
+
+    it('prop6 without dependent keys', function () {
+      expect(this.obj.get('prop6')).to.equal('');
+    });
+
   });
 
   describe('#notExistsIn', function () {
@@ -823,11 +1254,18 @@ describe('Ember.computed macros', function () {
   describe('#firstNotBlank', function () {
 
     beforeEach(function () {
+
+      App.setProperties({
+        someAnotherKey: 'NOT-EMPTY-STRING'
+      });
+
       this.obj = Em.Object.create({
         prop1: '',
         prop2: null,
         prop3: '1234',
-        prop4: Em.computed.firstNotBlank('prop1', 'prop2', 'prop3')
+        prop4: Em.computed.firstNotBlank('prop1', 'prop2', 'prop3'),
+        prop5: Em.computed.firstNotBlank('prop1', 'App.someRandomTestingKey', 'prop3'),
+        prop6: Em.computed.firstNotBlank('prop1', 'prop2')
       })
     });
 
@@ -850,6 +1288,22 @@ describe('Ember.computed macros', function () {
       expect(this.obj.get('prop4')).to.equal('prop1 is used');
     });
 
+    it('`prop5` depends on App.* key', function () {
+      expect(this.obj.get('prop5')).to.equal('NOT-EMPTY-STRING');
+      App.set('someAnotherKey', '!!!!!!!');
+      expect(this.obj.get('prop5')).to.equal('!!!!!!!');
+      App.set('someAnotherKey', null);
+      expect(this.obj.get('prop5')).to.equal('1234');
+    });
+
+    it('prop5 dependent keys are valid', function () {
+      expect(Em.meta(this.obj).descs.prop5._dependentKeys).to.eql(['prop1', 'App.someRandomTestingKey', 'prop3']);
+    });
+
+    it('prop6 depends on blank values', function () {
+      expect(this.obj.get('prop6')).to.be.null;
+    });
+
   });
 
 });
\ No newline at end of file


[12/50] ambari git commit: AMBARI-14092. Set yarn.resourcemanager.proxy-user-privileges.enabled to false in yarn-site when TDE is enabled (gautam via smohanty)

Posted by nc...@apache.org.
AMBARI-14092. Set yarn.resourcemanager.proxy-user-privileges.enabled to false in yarn-site when TDE is enabled (gautam via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bea418b7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bea418b7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bea418b7

Branch: refs/heads/branch-dev-patch-upgrade
Commit: bea418b75ed3ee9b2e4fa12d29a034bdfff96b4e
Parents: d855386
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Fri Nov 27 09:11:25 2015 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Fri Nov 27 09:12:23 2015 -0800

----------------------------------------------------------------------
 .../stacks/HDP/2.3/services/stack_advisor.py    | 21 +++++++++++++++++++-
 1 file changed, 20 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/bea418b7/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
index a5e419a..5047e9e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
@@ -552,6 +552,7 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
     super(HDP23StackAdvisor, self).recommendYARNConfigurations(configurations, clusterData, services, hosts)
     putYarnSiteProperty = self.putProperty(configurations, "yarn-site", services)
     putYarnSitePropertyAttributes = self.putPropertyAttribute(configurations, "yarn-site")
+    servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
 
     if "tez-site" not in services["configurations"]:
       putYarnSiteProperty('yarn.timeline-service.entity-group-fs-store.group-id-plugin-classes', '')
@@ -575,6 +576,10 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
     else:
       putYarnSitePropertyAttributes('yarn.authorization-provider', 'delete', 'true')
 
+    if 'RANGER_KMS' in servicesList and 'KERBEROS' in servicesList:
+      if 'yarn-site' in services["configurations"] and 'yarn.resourcemanager.proxy-user-privileges.enabled' in services["configurations"]["yarn-site"]["properties"]:
+        putYarnSiteProperty('yarn.resourcemanager.proxy-user-privileges.enabled', 'false')
+
   def getServiceConfigurationValidators(self):
       parentValidators = super(HDP23StackAdvisor, self).getServiceConfigurationValidators()
       childValidators = {
@@ -582,7 +587,8 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
         "HIVE": {"hiveserver2-site": self.validateHiveServer2Configurations,
                  "hive-site": self.validateHiveConfigurations},
         "HBASE": {"hbase-site": self.validateHBASEConfigurations},
-        "KAKFA": {"kafka-broker": self.validateKAFKAConfigurations}
+        "KAKFA": {"kafka-broker": self.validateKAFKAConfigurations},
+        "YARN": {"yarn-site": self.validateYARNConfigurations}
       }
       self.mergeValidators(parentValidators, childValidators)
       return parentValidators
@@ -757,6 +763,19 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
 
     return self.toConfigurationValidationProblems(validationItems, "kafka-broker")
 
+  def validateYARNConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
+    yarn_site = properties
+    validationItems = []
+    servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
+    if 'RANGER_KMS' in servicesList and 'KERBEROS' in servicesList:
+      yarn_resource_proxy_enabled = yarn_site['yarn.resourcemanager.proxy-user-privileges.enabled']
+      if yarn_resource_proxy_enabled.lower() == 'true':
+        validationItems.append({"config-name": 'yarn.resourcemanager.proxy-user-privileges.enabled',
+          "item": self.getWarnItem("If Ranger KMS service is installed set yarn.resourcemanager.proxy-user-privileges.enabled "\
+          "property value as false under yarn-site"
+        )})
+
+    return self.toConfigurationValidationProblems(validationItems, "yarn-site")
 
   def isComponentUsingCardinalityForLayout(self, componentName):
     return componentName in ['NFS_GATEWAY', 'PHOENIX_QUERY_SERVER', 'SPARK_THRIFTSERVER']


[18/50] ambari git commit: AMBARI-14010. Hive view doesn't show precision of types that support it. (Nitiraj Rathore via Jaimin)

Posted by nc...@apache.org.
AMBARI-14010. Hive view doesn't show precision of types that support it. (Nitiraj Rathore via Jaimin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/85c75155
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/85c75155
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/85c75155

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 85c751557746e224bd67ee153d005d31ea2d4b80
Parents: c1b5205
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Sun Nov 29 21:54:08 2015 -0800
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Sun Nov 29 21:54:08 2015 -0800

----------------------------------------------------------------------
 .../hive-web/app/helpers/format-column-type.js  | 39 ++++++++++++++++++++
 .../ui/hive-web/app/services/database.js        | 12 +++---
 .../hive-web/app/templates/databases-tree.hbs   |  2 +-
 3 files changed, 47 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/85c75155/contrib/views/hive/src/main/resources/ui/hive-web/app/helpers/format-column-type.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/helpers/format-column-type.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/helpers/format-column-type.js
new file mode 100644
index 0000000..8566b5e
--- /dev/null
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/helpers/format-column-type.js
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ used to format the precision and scale of type in database's table's columns
+**/
+
+import Ember from "ember";
+
+var columnTypeFormatter = function(column) {
+  let type = column.type;
+  let ext = type;
+  if( type === "VARCHAR" || type === "CHAR" || type == "DECIMAL"  ) {
+      ext += '(' + column.precision;
+    if (type == "DECIMAL") {
+        ext += "," + column.scale;
+    }
+    ext += ")";
+  }
+
+  return ext;
+};
+
+export default Ember.Handlebars.makeBoundHelper(columnTypeFormatter);

http://git-wip-us.apache.org/repos/asf/ambari/blob/85c75155/contrib/views/hive/src/main/resources/ui/hive-web/app/services/database.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/services/database.js b/contrib/views/hive/src/main/resources/ui/hive-web/app/services/database.js
index 58789a3..97f5134 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/services/database.js
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/services/database.js
@@ -69,7 +69,7 @@ export default Ember.Service.extend({
               table.get('name');
 
     url += '.page?searchId&count=' + this.get('pageCount');
-    url += '&columns=3,5';
+    url += '&columns=3,5,6,8';
 
     if (searchTerm) {
       url += '&searchId=searchColumns' + '&like=' + searchTerm;
@@ -86,10 +86,12 @@ export default Ember.Service.extend({
         var columns;
 
         columns = data.rows.map(function (row) {
-          return Ember.Object.create({
-            name: row[0],
-            type: row[1]
-          });
+            return Ember.Object.create({
+              name: row[0],
+              type: row[1],
+              precision : row[2],
+              scale : row[3]
+            });
         });
 
         defer.resolve({

http://git-wip-us.apache.org/repos/asf/ambari/blob/85c75155/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/databases-tree.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/databases-tree.hbs b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/databases-tree.hbs
index 45a9b7f..f1fdbf7 100644
--- a/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/databases-tree.hbs
+++ b/contrib/views/hive/src/main/resources/ui/hive-web/app/templates/databases-tree.hbs
@@ -28,7 +28,7 @@
                   {{#each column in table.visibleColumns}}
                     <div>
                       <strong>{{column.name}}</strong>
-                      <span class="pull-right">{{column.type}}</span>
+                      <span class="pull-right">{{format-column-type column}}</span>
                     </div>
                   {{/each}}
                   {{#if table.canGetNextPage}}


[22/50] ambari git commit: AMBARI-14116. hadoop.proxyuser.hcat.hosts contains only 2 of 3 WebHcat hosts (onechiporenko)

Posted by nc...@apache.org.
AMBARI-14116. hadoop.proxyuser.hcat.hosts contains only 2 of 3 WebHcat hosts (onechiporenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/61abd869
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/61abd869
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/61abd869

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 61abd869b64eae39b77aa5da143034887dca3875
Parents: 78bef69
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Mon Nov 30 19:10:34 2015 +0200
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Mon Nov 30 19:10:34 2015 +0200

----------------------------------------------------------------------
 ambari-web/app/controllers/main/host/details.js       |  3 +++
 ambari-web/test/controllers/main/host/details_test.js | 12 ++++++------
 2 files changed, 9 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/61abd869/ambari-web/app/controllers/main/host/details.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/host/details.js b/ambari-web/app/controllers/main/host/details.js
index c3713fc..ea1a168 100644
--- a/ambari-web/app/controllers/main/host/details.js
+++ b/ambari-web/app/controllers/main/host/details.js
@@ -955,9 +955,12 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
   getHiveHosts: function () {
     var
       hiveHosts = App.HostComponent.find().filterProperty('componentName', 'HIVE_METASTORE').mapProperty('hostName'),
+      webhcatHosts = App.HostComponent.find().filterProperty('componentName', 'WEBHCAT_SERVER').mapProperty('hostName'),
       hiveMetastoreHost = this.get('hiveMetastoreHost'),
       webhcatServerHost = this.get('webhcatServerHost');
 
+    hiveHosts = hiveHosts.concat(webhcatHosts).uniq();
+
     if (!!hiveMetastoreHost) {
       hiveHosts.push(hiveMetastoreHost);
       this.set('hiveMetastoreHost', '');

http://git-wip-us.apache.org/repos/asf/ambari/blob/61abd869/ambari-web/test/controllers/main/host/details_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/host/details_test.js b/ambari-web/test/controllers/main/host/details_test.js
index f0a2874..c5cf548 100644
--- a/ambari-web/test/controllers/main/host/details_test.js
+++ b/ambari-web/test/controllers/main/host/details_test.js
@@ -2981,7 +2981,7 @@ describe('App.MainHostDetailsController', function () {
           'deleteHiveMetaStore': false,
           'deleteWebHCatServer': false
         },
-        'hiveHosts': ['h1', 'h2'],
+        'hiveHosts': ['h1', 'h2', 'h4'],
         'title': 'adding HiveServer2'
       },
       {
@@ -2991,7 +2991,7 @@ describe('App.MainHostDetailsController', function () {
           'deleteHiveMetaStore': false,
           'deleteWebHCatServer': false
         },
-        'hiveHosts': ['h0', 'h1', 'h2'],
+        'hiveHosts': ['h0', 'h1', 'h2', 'h4'],
         'title': 'adding Hive Metastore'
       },
       {
@@ -3001,7 +3001,7 @@ describe('App.MainHostDetailsController', function () {
           'deleteHiveMetaStore': false,
           'deleteWebHCatServer': false
         },
-        'hiveHosts': ['h0', 'h1', 'h2'],
+        'hiveHosts': ['h0', 'h1', 'h2', 'h4'],
         'title': 'adding WebHCat Server'
       },
       {
@@ -3012,7 +3012,7 @@ describe('App.MainHostDetailsController', function () {
           'deleteHiveMetaStore': true,
           'deleteWebHCatServer': false
         },
-        'hiveHosts': ['h2'],
+        'hiveHosts': ['h2', 'h4'],
         'title': 'deleting Hive component'
       },
       {
@@ -3034,7 +3034,7 @@ describe('App.MainHostDetailsController', function () {
           'deleteHiveMetaStore': false,
           'deleteWebHCatServer': false
         },
-        'hiveHosts': ['h1'],
+        'hiveHosts': ['h1', 'h4'],
         'title': 'deleting host with Hive component'
       },
       {
@@ -3045,7 +3045,7 @@ describe('App.MainHostDetailsController', function () {
           'deleteHiveMetaStore': false,
           'deleteWebHCatServer': false
         },
-        'hiveHosts': ['h1'],
+        'hiveHosts': ['h1', 'h4'],
         'title': 'deleting host with WebHCat Server'
       }
     ];


[29/50] ambari git commit: AMBARI-14058. "Application Tracking URL" in Tez View broken due to RM HA changes in Ambari views framework. (Dipayan Bhowmick)

Posted by nc...@apache.org.
AMBARI-14058. "Application Tracking URL" in Tez View broken due to RM HA changes in Ambari views framework. (Dipayan Bhowmick)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/89c6f262
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/89c6f262
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/89c6f262

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 89c6f26286540651ee46bcb50f618014863c5e79
Parents: fe690bf
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Mon Nov 30 15:51:15 2015 -0800
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Mon Nov 30 15:51:15 2015 -0800

----------------------------------------------------------------------
 .../ambari/view/tez/rest/BaseProxyResource.java | 21 +++++++++++++++++++-
 .../resources/ui/scripts/init-ambari-view.js    |  3 +++
 2 files changed, 23 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/89c6f262/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/rest/BaseProxyResource.java
----------------------------------------------------------------------
diff --git a/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/rest/BaseProxyResource.java b/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/rest/BaseProxyResource.java
index 3670a40..5f8fb91 100644
--- a/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/rest/BaseProxyResource.java
+++ b/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/rest/BaseProxyResource.java
@@ -19,6 +19,7 @@
 package org.apache.ambari.view.tez.rest;
 
 import com.google.inject.Inject;
+import org.apache.ambari.view.tez.exceptions.ProxyException;
 import org.apache.ambari.view.tez.utils.ProxyHelper;
 import org.json.simple.JSONObject;
 import org.json.simple.JSONValue;
@@ -27,7 +28,13 @@ import javax.ws.rs.GET;
 import javax.ws.rs.Path;
 import javax.ws.rs.PathParam;
 import javax.ws.rs.Produces;
-import javax.ws.rs.core.*;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import java.net.URI;
+import java.net.URISyntaxException;
 import java.util.HashMap;
 
 /**
@@ -50,6 +57,18 @@ public abstract class BaseProxyResource {
     String response = proxyHelper.getResponse(url, new HashMap<String, String>());
 
     JSONObject jsonObject = (JSONObject) JSONValue.parse(response);
+
+    // If the endpoint returns some other format apart from JSON,
+    // we will only redirect the request. This is required because UI may call
+    // the proxy endpoint to directly point to any URL of RM/ATS.
+    if (jsonObject == null) {
+      try {
+        return Response.temporaryRedirect(new URI(url)).build();
+      } catch (URISyntaxException e) {
+        throw new ProxyException("Failed to set the redirection url to : " + url + ".Internal Error.",
+          Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e.getMessage());
+      }
+    }
     return Response.ok(jsonObject).type(MediaType.APPLICATION_JSON).build();
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/89c6f262/contrib/views/tez/src/main/resources/ui/scripts/init-ambari-view.js
----------------------------------------------------------------------
diff --git a/contrib/views/tez/src/main/resources/ui/scripts/init-ambari-view.js b/contrib/views/tez/src/main/resources/ui/scripts/init-ambari-view.js
index 5152fb9..c443726 100644
--- a/contrib/views/tez/src/main/resources/ui/scripts/init-ambari-view.js
+++ b/contrib/views/tez/src/main/resources/ui/scripts/init-ambari-view.js
@@ -164,6 +164,9 @@ function setConfigs() {
       aminfo: '%@rmproxy/proxy/__app_id__/ws/v1/tez'.fmt(resourcesPrefix),
       aminfoV2: '%@rmproxy/proxy/__app_id__/ws/v2/tez'.fmt(resourcesPrefix),
       cluster: '%@rmproxy/ws/v1/cluster'.fmt(resourcesPrefix)
+    },
+    otherNamespace: {
+      cluster: '%@rmproxy/cluster'.fmt(resourcesPrefix)
     }
   });
 


[13/50] ambari git commit: AMBARI-11825. Failure to add or install component defined with cardinality ALL. (Shantanu Mundkur via Jaimin)

Posted by nc...@apache.org.
AMBARI-11825. Failure to add or install component defined with cardinality ALL. (Shantanu Mundkur via Jaimin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/20a9ba18
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/20a9ba18
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/20a9ba18

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 20a9ba1890d4bede13f2971d9e39cbeb0c743086
Parents: bea418b
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Sat Nov 28 12:42:19 2015 -0800
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Sat Nov 28 12:42:19 2015 -0800

----------------------------------------------------------------------
 .../app/controllers/wizard/step8_controller.js  |  82 +++++++-
 .../test/controllers/wizard/step8_test.js       | 195 +++++++++++++++++++
 2 files changed, 271 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/20a9ba18/ambari-web/app/controllers/wizard/step8_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step8_controller.js b/ambari-web/app/controllers/wizard/step8_controller.js
index fa0adaa..bbe40bd 100644
--- a/ambari-web/app/controllers/wizard/step8_controller.js
+++ b/ambari-web/app/controllers/wizard/step8_controller.js
@@ -1054,13 +1054,37 @@ App.WizardStep8Controller = Em.Controller.extend(App.AddSecurityConfigs, App.wiz
    * @method createMasterHostComponents
    */
   createMasterHostComponents: function () {
+    var masterOnAllHosts = [];
+
+    this.get('content.services').filterProperty('isSelected').forEach(function (service) {
+      service.get('serviceComponents').filterProperty('isRequiredOnAllHosts').forEach(function (component) {
+        if (component.get('isMaster')) {
+          masterOnAllHosts.push(component.get('componentName'));
+        }
+      }, this);
+    }, this);
+
     // create master components for only selected services.
     var selectedMasterComponents = this.get('content.masterComponentHosts').filter(function (_component) {
       return this.get('selectedServices').mapProperty('serviceName').contains(_component.serviceId)
     }, this);
     selectedMasterComponents.mapProperty('component').uniq().forEach(function (component) {
-      var hostNames = selectedMasterComponents.filterProperty('component', component).filterProperty('isInstalled', false).mapProperty('hostName');
-      this.registerHostsToComponent(hostNames, component);
+      if (masterOnAllHosts.length > 0) {
+        var compOnAllHosts = false;
+        for (var i=0; i < masterOnAllHosts.length; i++) {
+          if (component.component_name == masterOnAllHosts[i]) {
+            compOnAllHosts = true;
+            break;
+          }
+        }
+        if (!compOnAllHosts) {
+          var hostNames = selectedMasterComponents.filterProperty('component', component).filterProperty('isInstalled', false).mapProperty('hostName');
+          this.registerHostsToComponent(hostNames, component);
+        }
+      } else {
+        var hostNames = selectedMasterComponents.filterProperty('component', component).filterProperty('isInstalled', false).mapProperty('hostName');
+        this.registerHostsToComponent(hostNames, component);
+      }
     }, this);
   },
 
@@ -1077,6 +1101,7 @@ App.WizardStep8Controller = Em.Controller.extend(App.AddSecurityConfigs, App.wiz
     });
     return clientsMap;
   },
+
   /**
    * Register slave components and clients
    * @uses registerHostsToComponent
@@ -1086,6 +1111,18 @@ App.WizardStep8Controller = Em.Controller.extend(App.AddSecurityConfigs, App.wiz
     var masterHosts = this.get('content.masterComponentHosts');
     var slaveHosts = this.get('content.slaveComponentHosts');
     var clients = this.get('content.clients').filterProperty('isInstalled', false);
+    var slaveOnAllHosts = [];
+    var clientOnAllHosts = [];
+
+    this.get('content.services').filterProperty('isSelected').forEach(function (service) {
+      service.get('serviceComponents').filterProperty('isRequiredOnAllHosts').forEach(function (component) {
+        if (component.get('isClient')) {
+          clientOnAllHosts.push(component.get('componentName'));
+        } else if (component.get('isSlave')) {
+          slaveOnAllHosts.push(component.get('componentName'));
+        }
+      }, this);
+    }, this);
 
     /**
      * Determines on which hosts client should be installed (based on availability of master components on hosts)
@@ -1103,8 +1140,24 @@ App.WizardStep8Controller = Em.Controller.extend(App.AddSecurityConfigs, App.wiz
 
     slaveHosts.forEach(function (_slave) {
       if (_slave.componentName !== 'CLIENT') {
-        var hostNames = _slave.hosts.filterProperty('isInstalled', false).mapProperty('hostName');
-        this.registerHostsToComponent(hostNames, _slave.componentName);
+        if (slaveOnAllHosts.length > 0) {
+          var compOnAllHosts = false;
+          for (var i=0; i < slaveOnAllHosts.length; i++) {
+            if (_slave.component_name == slaveOnAllHosts[i]) {
+              // component with ALL cardinality should not
+              // registerHostsToComponent in createSlaveAndClientsHostComponents
+              compOnAllHosts = true;
+              break;
+            }
+          }
+          if (!compOnAllHosts) {
+            var hostNames = _slave.hosts.filterProperty('isInstalled', false).mapProperty('hostName');
+            this.registerHostsToComponent(hostNames, _slave.componentName);
+          }
+        } else {
+          var hostNames = _slave.hosts.filterProperty('isInstalled', false).mapProperty('hostName');
+          this.registerHostsToComponent(hostNames, _slave.componentName);
+        }
       }
       else {
         clients.forEach(function (_client) {
@@ -1120,8 +1173,24 @@ App.WizardStep8Controller = Em.Controller.extend(App.AddSecurityConfigs, App.wiz
               });
             }
           }
-          hostNames = hostNames.uniq();
-          this.registerHostsToComponent(hostNames, _client.component_name);
+          if (clientOnAllHosts.length > 0) {
+            var compOnAllHosts = false;
+            for (var i=0; i < clientOnAllHosts.length; i++) {
+              if (_client.component_name == clientOnAllHosts[i]) {
+                // component with ALL cardinality should not
+                // registerHostsToComponent in createSlaveAndClientsHostComponents
+                compOnAllHosts = true;
+                break;
+              }
+            }
+            if (!compOnAllHosts) {
+              hostNames = hostNames.uniq();
+              this.registerHostsToComponent(hostNames, _client.component_name);
+            }
+          } else {
+            hostNames = hostNames.uniq();
+            this.registerHostsToComponent(hostNames, _client.component_name);
+          }
         }, this);
       }
     }, this);
@@ -1220,6 +1289,7 @@ App.WizardStep8Controller = Em.Controller.extend(App.AddSecurityConfigs, App.wiz
     var masterHosts = this.get('content.masterComponentHosts');
 
     // add all components with cardinality == ALL of selected services
+
     var registeredHosts = this.getRegisteredHosts();
     var notInstalledHosts = registeredHosts.filterProperty('isInstalled', false);
     this.get('content.services').filterProperty('isSelected').forEach(function (service) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/20a9ba18/ambari-web/test/controllers/wizard/step8_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/wizard/step8_test.js b/ambari-web/test/controllers/wizard/step8_test.js
index 100f781..71d1e48 100644
--- a/ambari-web/test/controllers/wizard/step8_test.js
+++ b/ambari-web/test/controllers/wizard/step8_test.js
@@ -1632,6 +1632,201 @@ describe('App.WizardStep8Controller', function () {
 
     });
 
+  describe('#createAdditionalHostComponentsOnAllHosts', function () {
+
+      beforeEach(function() {
+        sinon.stub(installerStep8Controller, 'registerHostsToComponent', Em.K);
+      });
+
+      afterEach(function() {
+        installerStep8Controller.registerHostsToComponent.restore();
+      });
+
+      it('should add components with isRequiredOnAllHosts == true (1)', function() {
+        installerStep8Controller.reopen({
+          getRegisteredHosts: function() {
+            return [{hostName: 'h1'}, {hostName: 'h2'}];
+          },
+          content: {
+            services: Em.A([
+              Em.Object.create({
+                serviceName: 'ANYSERVICE', isSelected: true, isInstalled: false, serviceComponents: [
+                  // set isRequiredOnAllHosts = true for slave and client
+                  Em.Object.create({
+                    componentName: 'ANYSERVICE_MASTER',
+                    isMaster: true,
+                    isRequiredOnAllHosts: false
+                  }),
+                  Em.Object.create({
+                    componentName: 'ANYSERVICE_SLAVE',
+                    isSlave: true,
+                    isRequiredOnAllHosts: true
+                  }),
+                  Em.Object.create({
+                    componentName: 'ANYSERVICE_SLAVE2',
+                    isSlave: true,
+                    isRequiredOnAllHosts: true
+                  }),
+                  Em.Object.create({
+                    componentName: 'ANYSERVICE_CLIENT',
+                    isClient: true,
+                    isRequiredOnAllHosts: true
+                  })
+                ]
+              })
+            ]),
+            masterComponentHosts: Em.A([
+              Em.Object.create({
+                componentName: 'ANYSERVICE_MASTER',
+                component: 'ANYSERVICE_MASTER',
+                hosts: Em.A([
+                  Em.Object.create({hostName: 'h1', isInstalled: true})
+                ])
+              })
+            ]),
+            slaveComponentHosts: Em.A([
+              Em.Object.create({
+                componentName: 'ANYSERVICE_SLAVE',
+                hosts: Em.A([
+                  Em.Object.create({hostName: 'h1', isInstalled: false}),
+                  Em.Object.create({hostName: 'h2', isInstalled: false})
+                ])
+              }),
+              Em.Object.create({
+                componentName: 'ANYSERVICE_SLAVE2',
+                hosts: Em.A([
+                  Em.Object.create({hostName: 'h1', isInstalled: false}),
+                  Em.Object.create({hostName: 'h2', isInstalled: false})
+                ]),
+              }),
+              Em.Object.create({
+                componentName: 'CLIENT',
+                hosts: Em.A([
+                  Em.Object.create({hostName: 'h1', isInstalled: false}),
+                  Em.Object.create({hostName: 'h2', isInstalled: false})
+                ])
+              })
+            ]),
+            clients: Em.A([
+              Em.Object.create({
+                component_name: 'ANYSERVICE_CLIENT',
+                isInstalled: false,
+                hosts: Em.A([
+                  Em.Object.create({hostName: 'h1', isInstalled: false}),
+                  Em.Object.create({hostName: 'h2', isInstalled: false})
+                ])
+              })
+            ])
+          }
+        });
+
+      installerStep8Controller.set('ajaxRequestsQueue', App.ajaxQueue.create());
+      installerStep8Controller.get('ajaxRequestsQueue').clear();
+      installerStep8Controller.createAdditionalHostComponents();
+      // Any component with isRequiredOnAllHosts = true implies that
+      // registerHostsToComponent would be done via
+      // createAdditionalHostComponents() BUT NOT
+      // createMasterHostComponents() or createSlaveAndClientsHostComponents()
+      // or createAdditionalClientComponents()
+      expect(installerStep8Controller.registerHostsToComponent.args[0][0]).to.eql(['h1', 'h2']);
+      expect(installerStep8Controller.registerHostsToComponent.args[0][1]).to.equal('ANYSERVICE_SLAVE');
+      expect(installerStep8Controller.registerHostsToComponent.args[1][0]).to.eql(['h1', 'h2']);
+      expect(installerStep8Controller.registerHostsToComponent.args[1][1]).to.equal('ANYSERVICE_SLAVE2');
+      expect(installerStep8Controller.registerHostsToComponent.args[2][0]).to.eql(['h1', 'h2']);
+      expect(installerStep8Controller.registerHostsToComponent.args[2][1]).to.equal('ANYSERVICE_CLIENT');
+      });
+
+      it('should not add components with isRequiredOnAllHosts == false (2)', function() {
+        installerStep8Controller.reopen({
+          getRegisteredHosts: function() {
+            return [{hostName: 'h1'}, {hostName: 'h2'}];
+          },
+          content: {
+            services: Em.A([
+              Em.Object.create({
+                serviceName: 'ANYSERVICE', isSelected: true, isInstalled: false, serviceComponents: [
+                  // set isRequiredOnAllHosts = false for all components
+                  Em.Object.create({
+                    componentName: 'ANYSERVICE_MASTER',
+                    isMaster: true,
+                    isRequiredOnAllHosts: false
+                  }),
+                  Em.Object.create({
+                    componentName: 'ANYSERVICE_SLAVE',
+                    isSlave: true,
+                    isRequiredOnAllHosts: false
+                  }),
+                  Em.Object.create({
+                    componentName: 'ANYSERVICE_SLAVE2',
+                    isSlave: true,
+                    isRequiredOnAllHosts: false
+                  }),
+                  Em.Object.create({
+                    componentName: 'ANYSERVICE_CLIENT',
+                    isClient: true,
+                    isRequiredOnAllHosts: false
+                  })
+                ]
+              })
+            ]),
+            masterComponentHosts: Em.A([
+              Em.Object.create({
+                componentName: 'ANYSERVICE_MASTER',
+                component: 'ANYSERVICE_MASTER',
+                hosts: Em.A([
+                  Em.Object.create({hostName: 'h1', isInstalled: true})
+                ])
+              })
+            ]),
+            slaveComponentHosts: Em.A([
+              Em.Object.create({
+                componentName: 'ANYSERVICE_SLAVE',
+                hosts: Em.A([
+                  Em.Object.create({hostName: 'h1', isInstalled: false}),
+                  Em.Object.create({hostName: 'h2', isInstalled: false})
+                ])
+              }),
+              Em.Object.create({
+                componentName: 'ANYSERVICE_SLAVE2',
+                hosts: Em.A([
+                  Em.Object.create({hostName: 'h1', isInstalled: false}),
+                  Em.Object.create({hostName: 'h2', isInstalled: false})
+                ]),
+              }),
+              Em.Object.create({
+                componentName: 'CLIENT',
+                hosts: Em.A([
+                  Em.Object.create({hostName: 'h1', isInstalled: false}),
+                  Em.Object.create({hostName: 'h2', isInstalled: false})
+                ])
+              })
+            ]),
+            clients: Em.A([
+              Em.Object.create({
+                component_name: 'ANYSERVICE_CLIENT',
+                isInstalled: false,
+                hosts: Em.A([
+                  Em.Object.create({hostName: 'h1', isInstalled: false}),
+                  Em.Object.create({hostName: 'h2', isInstalled: false})
+                ])
+              })
+            ])
+          }
+        });
+
+      installerStep8Controller.set('ajaxRequestsQueue', App.ajaxQueue.create());
+      installerStep8Controller.get('ajaxRequestsQueue').clear();
+      installerStep8Controller.createAdditionalHostComponents();
+      // isRequiredOnAllHosts = false for all components, implies that
+      // registerHostsToComponent would be done via
+      // createMasterHostComponents() or createSlaveAndClientsHostComponents()
+      // or createAdditionalClientComponents()
+      // BUT NOT createAdditionalHostComponents()
+      expect(installerStep8Controller.registerHostsToComponent.callCount).to.equal(0);
+      });
+
+  });
+
     describe('#createNotification', function () {
 
       beforeEach(function () {


[11/50] ambari git commit: AMBARI-14101. Post Upgrade: After upgrade oozie and hive server failing to come up. (dlysnichenko)

Posted by nc...@apache.org.
AMBARI-14101. Post Upgrade: After upgrade oozie and hive server failing to come up.  (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d855386b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d855386b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d855386b

Branch: refs/heads/branch-dev-patch-upgrade
Commit: d855386b9798ef9c5a8669498b6b9c853b45edce
Parents: 5becb31
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Fri Nov 27 18:14:58 2015 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Fri Nov 27 18:14:58 2015 +0200

----------------------------------------------------------------------
 .../server/upgrade/AbstractUpgradeCatalog.java  |   85 ++
 .../server/upgrade/UpgradeCatalog210.java       |   74 +-
 .../server/upgrade/UpgradeCatalog213.java       |   52 +-
 .../server/upgrade/UpgradeCatalog210Test.java   |    2 +-
 .../server/upgrade/UpgradeCatalog213Test.java   |   79 +-
 .../test_kerberos_descriptor_2_1_3.json         | 1316 ++++++++++++++++++
 6 files changed, 1505 insertions(+), 103 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d855386b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
index ddc431d..7cbdd33 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
@@ -28,7 +28,9 @@ import org.apache.ambari.server.configuration.Configuration.DatabaseType;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.ConfigurationRequest;
 import org.apache.ambari.server.orm.DBAccessor;
+import org.apache.ambari.server.orm.dao.ArtifactDAO;
 import org.apache.ambari.server.orm.dao.MetainfoDAO;
+import org.apache.ambari.server.orm.entities.ArtifactEntity;
 import org.apache.ambari.server.orm.entities.MetainfoEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -36,6 +38,9 @@ import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.PropertyInfo;
 import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.kerberos.AbstractKerberosDescriptorContainer;
+import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
 import org.apache.ambari.server.utils.VersionUtils;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
@@ -554,6 +559,86 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
     return properties;
   }
 
+  /**
+   * Iterates through a collection of AbstractKerberosDescriptorContainers to find and update
+   * identity descriptor references.
+   *
+   * @param descriptorMap    a String to AbstractKerberosDescriptorContainer map to iterate through
+   * @param referenceName    the reference name to change
+   * @param newReferenceName the new reference name
+   */
+  protected void updateKerberosDescriptorIdentityReferences(Map<String, ? extends AbstractKerberosDescriptorContainer> descriptorMap,
+                                                          String referenceName,
+                                                          String newReferenceName) {
+    if (descriptorMap != null) {
+      for (AbstractKerberosDescriptorContainer kerberosServiceDescriptor : descriptorMap.values()) {
+        updateKerberosDescriptorIdentityReferences(kerberosServiceDescriptor, referenceName, newReferenceName);
+
+        if (kerberosServiceDescriptor instanceof KerberosServiceDescriptor) {
+          updateKerberosDescriptorIdentityReferences(((KerberosServiceDescriptor) kerberosServiceDescriptor).getComponents(),
+              referenceName, newReferenceName);
+        }
+      }
+    }
+  }
+
+  /**
+   * Given an AbstractKerberosDescriptorContainer, iterates through its contained identity descriptors
+   * to find ones matching the reference name to change.
+   * <p/>
+   * If found, the reference name is updated to the new name.
+   *
+   * @param descriptorContainer the AbstractKerberosDescriptorContainer to update
+   * @param referenceName       the reference name to change
+   * @param newReferenceName    the new reference name
+   */
+  protected void updateKerberosDescriptorIdentityReferences(AbstractKerberosDescriptorContainer descriptorContainer,
+                                                          String referenceName,
+                                                          String newReferenceName) {
+    if (descriptorContainer != null) {
+      KerberosIdentityDescriptor identity = descriptorContainer.getIdentity(referenceName);
+      if (identity != null) {
+        identity.setName(newReferenceName);
+      }
+    }
+  }
+
+  /**
+   * Update the stored Kerberos Descriptor artifacts to conform to the new structure.
+   * <p/>
+   * Finds the relevant artifact entities and iterates through them to process each independently.
+   */
+  protected void updateKerberosDescriptorArtifacts() throws AmbariException {
+    ArtifactDAO artifactDAO = injector.getInstance(ArtifactDAO.class);
+    List<ArtifactEntity> artifactEntities = artifactDAO.findByName("kerberos_descriptor");
+
+    if (artifactEntities != null) {
+      for (ArtifactEntity artifactEntity : artifactEntities) {
+        updateKerberosDescriptorArtifact(artifactDAO, artifactEntity);
+      }
+    }
+  }
+
+
+
+  /**
+   * Update the specified Kerberos Descriptor artifact to conform to the new structure.
+   * <p/>
+   * On ambari version update, some identities can be moved between scopes (e.g. from service to component), so
+   * the old identity needs to be moved to the proper place and all references to the moved identity need to be updated.
+   * <p/>
+   * By default descriptor remains unchanged and this method must be overridden in child UpgradeCatalog to meet new
+   * ambari version changes in kerberos descriptors.
+   * <p/>
+   * The supplied ArtifactEntity is updated in place and merged back into the database.
+   *
+   * @param artifactDAO    the ArtifactDAO to use to store the updated ArtifactEntity
+   * @param artifactEntity the ArtifactEntity to update
+   */
+  protected void updateKerberosDescriptorArtifact(ArtifactDAO artifactDAO, ArtifactEntity artifactEntity) throws AmbariException {
+    // NOOP
+  }
+
   @Override
   public void upgradeSchema() throws AmbariException, SQLException {
     DatabaseType databaseType = configuration.getDatabaseType();

http://git-wip-us.apache.org/repos/asf/ambari/blob/d855386b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
index 2717993..7940e02 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
@@ -1007,36 +1007,10 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
     updateKerberosDescriptorArtifacts();
   }
 
-
   /**
-   * Update the stored Kerberos Descriptor artifacts to conform to the new structure.
-   * <p/>
-   * Finds the relevant artifact entities and iterates through them to process each independently.
-   */
-  protected void updateKerberosDescriptorArtifacts() throws AmbariException {
-    ArtifactDAO artifactDAO = injector.getInstance(ArtifactDAO.class);
-    List<ArtifactEntity> artifactEntities = artifactDAO.findByName("kerberos_descriptor");
-
-    if (artifactEntities != null) {
-      for (ArtifactEntity artifactEntity : artifactEntities) {
-        updateKerberosDescriptorArtifact(artifactDAO, artifactEntity);
-      }
-    }
-  }
-
-  /**
-   * Update the specified Kerberos Descriptor artifact to conform to the new structure.
-   * <p/>
-   * To conform to the new Kerberos Descriptor structure, the global "hdfs" identity (if exists)
-   * must be moved to the set of identities under the HDFS service.  If no HDFS service exists, one
-   * is created to hold only the "hdfs" identity descriptor. Then, any identity descriptor references
-   * to "/hdfs" must be changed to "/HDFS/hdfs" to point to the moved "hdfs" identity descriptor.
-   * <p/>
-   * The supplied ArtifactEntity is updated in place a merged back into the database.
-   *
-   * @param artifactDAO    the ArtifactDAO to use to store the updated ArtifactEntity
-   * @param artifactEntity the ArtifactEntity to update
+   * {@inheritDoc}
    */
+  @Override
   protected void updateKerberosDescriptorArtifact(ArtifactDAO artifactDAO, ArtifactEntity artifactEntity) throws AmbariException {
     if (artifactEntity != null) {
       Map<String, Object> data = artifactEntity.getArtifactData();
@@ -1088,50 +1062,6 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
   }
 
   /**
-   * Iterates through a collection of AbstractKerberosDescriptorContainers to find and update
-   * identity descriptor references.
-   *
-   * @param descriptorMap    a String to AbstractKerberosDescriptorContainer map to iterate trough
-   * @param referenceName    the reference name to change
-   * @param newReferenceName the new reference name
-   */
-  private void updateKerberosDescriptorIdentityReferences(Map<String, ? extends AbstractKerberosDescriptorContainer> descriptorMap,
-                                                          String referenceName,
-                                                          String newReferenceName) {
-    if (descriptorMap != null) {
-      for (AbstractKerberosDescriptorContainer kerberosServiceDescriptor : descriptorMap.values()) {
-        updateKerberosDescriptorIdentityReferences(kerberosServiceDescriptor, referenceName, newReferenceName);
-
-        if (kerberosServiceDescriptor instanceof KerberosServiceDescriptor) {
-          updateKerberosDescriptorIdentityReferences(((KerberosServiceDescriptor) kerberosServiceDescriptor).getComponents(),
-              referenceName, newReferenceName);
-        }
-      }
-    }
-  }
-
-  /**
-   * Given an AbstractKerberosDescriptorContainer, iterates through its contained identity descriptors
-   * to find ones matching the reference name to change.
-   * <p/>
-   * If found, the reference name is updated to the new name.
-   *
-   * @param descriptorContainer the AbstractKerberosDescriptorContainer to update
-   * @param referenceName       the reference name to change
-   * @param newReferenceName    the new reference name
-   */
-  private void updateKerberosDescriptorIdentityReferences(AbstractKerberosDescriptorContainer descriptorContainer,
-                                                          String referenceName,
-                                                          String newReferenceName) {
-    if (descriptorContainer != null) {
-      KerberosIdentityDescriptor identity = descriptorContainer.getIdentity(referenceName);
-      if (identity != null) {
-        identity.setName(newReferenceName);
-      }
-    }
-  }
-
-  /**
    * Delete STORM_REST_API component if HDP is upgraded past 2.2 and the
    * Component still exists.
    */

http://git-wip-us.apache.org/repos/asf/ambari/blob/d855386b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
index df681fa..a070935 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
@@ -29,22 +29,8 @@ import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
-import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
-import org.apache.ambari.server.orm.dao.ClusterDAO;
-import org.apache.ambari.server.orm.dao.ClusterVersionDAO;
-import org.apache.ambari.server.orm.dao.DaoUtils;
-import org.apache.ambari.server.orm.dao.HostVersionDAO;
-import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
-import org.apache.ambari.server.orm.dao.StackDAO;
-import org.apache.ambari.server.orm.dao.UpgradeDAO;
-import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
-import org.apache.ambari.server.orm.entities.ClusterEntity;
-import org.apache.ambari.server.orm.entities.ClusterVersionEntity;
-import org.apache.ambari.server.orm.entities.HostEntity;
-import org.apache.ambari.server.orm.entities.HostVersionEntity;
-import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
-import org.apache.ambari.server.orm.entities.StackEntity;
-import org.apache.ambari.server.orm.entities.UpgradeEntity;
+import org.apache.ambari.server.orm.dao.*;
+import org.apache.ambari.server.orm.entities.*;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
@@ -53,6 +39,7 @@ import org.apache.ambari.server.state.SecurityType;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.alert.SourceType;
+import org.apache.ambari.server.state.kerberos.*;
 import org.apache.ambari.server.state.stack.upgrade.Direction;
 import org.apache.ambari.server.state.stack.upgrade.RepositoryVersionHelper;
 import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
@@ -313,6 +300,7 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
     updateZookeeperLog4j();
     updateHiveConfig();
     updateAccumuloConfigs();
+    updateKerberosDescriptorArtifacts();
     updateKnoxTopology();
   }
 
@@ -612,6 +600,38 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
   }
 
   /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected void updateKerberosDescriptorArtifact(ArtifactDAO artifactDAO, ArtifactEntity artifactEntity) throws AmbariException {
+    if (artifactEntity != null) {
+      Map<String, Object> data = artifactEntity.getArtifactData();
+
+      if (data != null) {
+        final KerberosDescriptor kerberosDescriptor = new KerberosDescriptorFactory().createInstance(data);
+
+        if (kerberosDescriptor != null) {
+          KerberosServiceDescriptor hdfsService = kerberosDescriptor.getService("HDFS");
+          if(hdfsService != null) {
+            // before 2.1.3 the hdfs identity was expected to be in the HDFS service
+            KerberosIdentityDescriptor hdfsIdentity = hdfsService.getIdentity("hdfs");
+            KerberosComponentDescriptor namenodeComponent = hdfsService.getComponent("NAMENODE");
+            hdfsIdentity.setName("hdfs");
+            hdfsService.removeIdentity("hdfs");
+            namenodeComponent.putIdentity(hdfsIdentity);
+          }
+          updateKerberosDescriptorIdentityReferences(kerberosDescriptor, "/HDFS/hdfs", "/HDFS/NAMENODE/hdfs");
+          updateKerberosDescriptorIdentityReferences(kerberosDescriptor.getServices(), "/HDFS/hdfs", "/HDFS/NAMENODE/hdfs");
+
+          artifactEntity.setArtifactData(kerberosDescriptor.toMap());
+          artifactDAO.merge(artifactEntity);
+        }
+      }
+    }
+  }
+
+  /**
    * If still on HDP 2.1, then no repo versions exist, so need to bootstrap the HDP 2.1 repo version,
    * and mark it as CURRENT in the cluster_version table for the cluster, as well as the host_version table
    * for all hosts.

http://git-wip-us.apache.org/repos/asf/ambari/blob/d855386b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
index c2889fe..83018a2 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
@@ -216,7 +216,7 @@ public class UpgradeCatalog210Test {
       UpgradeCatalog210.class.getDeclaredMethod("removeStormRestApiServiceComponent");
 
     Method updateKerberosDescriptorArtifacts =
-      UpgradeCatalog210.class.getDeclaredMethod("updateKerberosDescriptorArtifacts");
+      AbstractUpgradeCatalog.class.getDeclaredMethod("updateKerberosDescriptorArtifacts");
 
     UpgradeCatalog210 upgradeCatalog210 = createMockBuilder(UpgradeCatalog210.class)
         .addMockedMethod(addNewConfigurationsFromXml)

http://git-wip-us.apache.org/repos/asf/ambari/blob/d855386b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
index 071cb69..d8e7267 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
@@ -40,20 +40,8 @@ import org.apache.ambari.server.controller.MaintenanceStateHelper;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
-import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
-import org.apache.ambari.server.orm.dao.ClusterDAO;
-import org.apache.ambari.server.orm.dao.ClusterVersionDAO;
-import org.apache.ambari.server.orm.dao.DaoUtils;
-import org.apache.ambari.server.orm.dao.HostVersionDAO;
-import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
-import org.apache.ambari.server.orm.dao.StackDAO;
-import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
-import org.apache.ambari.server.orm.entities.ClusterEntity;
-import org.apache.ambari.server.orm.entities.ClusterVersionEntity;
-import org.apache.ambari.server.orm.entities.HostEntity;
-import org.apache.ambari.server.orm.entities.HostVersionEntity;
-import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
-import org.apache.ambari.server.orm.entities.StackEntity;
+import org.apache.ambari.server.orm.dao.*;
+import org.apache.ambari.server.orm.entities.*;
 import org.apache.ambari.server.stack.StackManagerFactory;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -64,6 +52,7 @@ import org.apache.ambari.server.state.SecurityType;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
+import org.apache.ambari.server.state.kerberos.*;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.apache.ambari.server.state.stack.upgrade.RepositoryVersionHelper;
 import org.easymock.Capture;
@@ -76,8 +65,10 @@ import org.junit.Before;
 import org.junit.Test;
 
 import javax.persistence.EntityManager;
+import java.io.File;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
+import java.net.URL;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -100,6 +91,8 @@ import static org.easymock.EasyMock.reset;
 import static org.easymock.EasyMock.verify;
 import static org.junit.Assert.assertTrue;
 
+import static junit.framework.Assert.assertNotNull;
+import static junit.framework.Assert.assertNull;
 /**
  * {@link org.apache.ambari.server.upgrade.UpgradeCatalog213} unit tests.
  */
@@ -237,6 +230,7 @@ public class UpgradeCatalog213Test {
     Method updateRangerEnvConfig = UpgradeCatalog213.class.getDeclaredMethod("updateRangerEnvConfig");
     Method updateHiveConfig = UpgradeCatalog213.class.getDeclaredMethod("updateHiveConfig");
     Method updateAccumuloConfigs = UpgradeCatalog213.class.getDeclaredMethod("updateAccumuloConfigs");
+    Method updateKerberosDescriptorArtifacts = AbstractUpgradeCatalog.class.getDeclaredMethod("updateKerberosDescriptorArtifacts");
     Method updateKnoxTopology = UpgradeCatalog213.class.getDeclaredMethod("updateKnoxTopology");
 
     UpgradeCatalog213 upgradeCatalog213 = createMockBuilder(UpgradeCatalog213.class)
@@ -253,6 +247,7 @@ public class UpgradeCatalog213Test {
       .addMockedMethod(updateRangerEnvConfig)
       .addMockedMethod(updateHiveConfig)
       .addMockedMethod(updateAccumuloConfigs)
+      .addMockedMethod(updateKerberosDescriptorArtifacts)
       .addMockedMethod(updateKnoxTopology)
       .createMock();
 
@@ -283,6 +278,8 @@ public class UpgradeCatalog213Test {
     expectLastCall().once();
     upgradeCatalog213.updateKnoxTopology();
     expectLastCall().once();
+    upgradeCatalog213.updateKerberosDescriptorArtifacts();
+    expectLastCall().once();
 
     replay(upgradeCatalog213);
 
@@ -454,6 +451,60 @@ public class UpgradeCatalog213Test {
   }
 
   @Test
+  public void testUpdateKerberosDescriptorArtifact() throws Exception {
+    final KerberosDescriptorFactory kerberosDescriptorFactory = new KerberosDescriptorFactory();
+
+    KerberosServiceDescriptor serviceDescriptor;
+
+    URL systemResourceURL = ClassLoader.getSystemResource("kerberos/test_kerberos_descriptor_2_1_3.json");
+    assertNotNull(systemResourceURL);
+
+    final KerberosDescriptor kerberosDescriptorOrig = kerberosDescriptorFactory.createInstance(new File(systemResourceURL.getFile()));
+    assertNotNull(kerberosDescriptorOrig);
+
+    serviceDescriptor = kerberosDescriptorOrig.getService("HDFS");
+    assertNotNull(serviceDescriptor);
+    assertNotNull(serviceDescriptor.getIdentity("hdfs"));
+
+    serviceDescriptor = kerberosDescriptorOrig.getService("OOZIE");
+    assertNotNull(serviceDescriptor);
+    assertNotNull(serviceDescriptor.getIdentity("/HDFS/hdfs"));
+
+    UpgradeCatalog213 upgradeMock = createMockBuilder(UpgradeCatalog213.class).createMock();
+
+    Capture<Map<String, Object>> updatedData = new Capture<Map<String, Object>>();
+
+    ArtifactEntity artifactEntity = createNiceMock(ArtifactEntity.class);
+    expect(artifactEntity.getArtifactData())
+        .andReturn(kerberosDescriptorOrig.toMap())
+        .once();
+
+    artifactEntity.setArtifactData(capture(updatedData));
+    expectLastCall().once();
+
+    replay(artifactEntity, upgradeMock);
+    upgradeMock.updateKerberosDescriptorArtifact(createNiceMock(ArtifactDAO.class), artifactEntity);
+    verify(artifactEntity, upgradeMock);
+
+    KerberosDescriptor kerberosDescriptorUpdated = new KerberosDescriptorFactory().createInstance(updatedData.getValue());
+    assertNotNull(kerberosDescriptorUpdated);
+
+    serviceDescriptor = kerberosDescriptorUpdated.getService("HDFS");
+    assertNotNull(serviceDescriptor);
+    assertNull(serviceDescriptor.getIdentity("hdfs"));
+
+    KerberosComponentDescriptor namenodeComponent = serviceDescriptor.getComponent("NAMENODE");
+    assertNotNull(namenodeComponent.getIdentity("hdfs"));
+
+    serviceDescriptor = kerberosDescriptorUpdated.getService("OOZIE");
+    assertNotNull(serviceDescriptor);
+    assertNull(serviceDescriptor.getIdentity("/HDFS/hdfs"));
+    assertNotNull(serviceDescriptor.getIdentity("/HDFS/NAMENODE/hdfs"));
+  }
+
+
+
+  @Test
   public void testUpdateHbaseEnvConfig() throws AmbariException {
     EasyMockSupport easyMockSupport = new EasyMockSupport();
     final AmbariManagementController mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);

http://git-wip-us.apache.org/repos/asf/ambari/blob/d855386b/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_2_1_3.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_2_1_3.json b/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_2_1_3.json
new file mode 100644
index 0000000..3b4dff4
--- /dev/null
+++ b/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_2_1_3.json
@@ -0,0 +1,1316 @@
+{
+  "identities": [{
+    "principal": {
+      "type": "service",
+      "value": "HTTP/_HOST@${realm}"
+    },
+    "name": "spnego",
+    "keytab": {
+      "owner": {
+        "access": "r",
+        "name": "root"
+      },
+      "file": "${keytab_dir}/spnego.service.keytab",
+      "group": {
+        "access": "r",
+        "name": "${cluster-env/user_group}"
+      }
+    }
+  }, {
+    "principal": {
+      "configuration": "cluster-env/smokeuser_principal_name",
+      "type": "user",
+      "local_username": "${cluster-env/smokeuser}",
+      "value": "${cluster-env/smokeuser}-${cluster_name}@${realm}"
+    },
+    "name": "smokeuser",
+    "keytab": {
+      "owner": {
+        "access": "r",
+        "name": "${cluster-env/smokeuser}"
+      },
+      "file": "${keytab_dir}/smokeuser.headless.keytab",
+      "configuration": "cluster-env/smokeuser_keytab",
+      "group": {
+        "access": "r",
+        "name": "${cluster-env/user_group}"
+      }
+    }
+  }],
+  "services": [{
+    "components": [{
+      "name": "MAHOUT"
+    }],
+    "identities": [{
+      "name": "/smokeuser"
+    }, {
+      "name": "/HDFS/hdfs"
+    }],
+    "name": "MAHOUT"
+  }, {
+    "components": [{
+      "identities": [{
+        "principal": {
+          "configuration": "mapred-site/mapreduce.jobhistory.principal",
+          "type": "service",
+          "local_username": "${mapred-env/mapred_user}",
+          "value": "jhs/_HOST@${realm}"
+        },
+        "name": "history_server_jhs",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${mapred-env/mapred_user}"
+          },
+          "file": "${keytab_dir}/jhs.service.keytab",
+          "configuration": "mapred-site/mapreduce.jobhistory.keytab",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }, {
+        "principal": {
+          "configuration": "mapred-site/mapreduce.jobhistory.webapp.spnego-principal",
+          "type": "service",
+          "value": "HTTP/_HOST@${realm}"
+        },
+        "name": "/spnego",
+        "keytab": {
+          "owner": {},
+          "file": "${keytab_dir}/spnego.service.keytab",
+          "configuration": "mapred-site/mapreduce.jobhistory.webapp.spnego-keytab-file",
+          "group": {}
+        }
+      }],
+      "name": "HISTORYSERVER"
+    }],
+    "identities": [{
+      "name": "/spnego"
+    }, {
+      "name": "/HDFS/hdfs"
+    }, {
+      "name": "/smokeuser"
+    }],
+    "name": "MAPREDUCE2"
+  }, {
+    "components": [{
+      "identities": [{
+        "principal": {
+          "configuration": "oozie-site/oozie.service.HadoopAccessorService.kerberos.principal",
+          "type": "service",
+          "local_username": "${oozie-env/oozie_user}",
+          "value": "oozie/_HOST@${realm}"
+        },
+        "name": "oozie_server",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${oozie-env/oozie_user}"
+          },
+          "file": "${keytab_dir}/oozie.service.keytab",
+          "configuration": "oozie-site/oozie.service.HadoopAccessorService.keytab.file",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }, {
+        "principal": {
+          "configuration": "oozie-site/oozie.authentication.kerberos.principal",
+          "type": "service"
+        },
+        "name": "/spnego",
+        "keytab": {
+          "owner": {},
+          "configuration": "oozie-site/oozie.authentication.kerberos.keytab",
+          "group": {}
+        }
+      }],
+      "name": "OOZIE_SERVER"
+    }],
+    "identities": [{
+      "name": "/spnego"
+    }, {
+      "name": "/smokeuser"
+    }, {
+      "name": "/HDFS/hdfs"
+    }],
+    "auth_to_local_properties": [
+      "oozie-site/oozie.authentication.kerberos.name.rules"
+    ],
+    "configurations": [{
+      "oozie-site": {
+        "oozie.service.HadoopAccessorService.kerberos.enabled": "true",
+        "oozie.authentication.type": "kerberos",
+        "oozie.service.AuthorizationService.authorization.enabled": "true",
+        "local.realm": "${realm}",
+        "oozie.credentials.credentialclasses": "hcat=org.apache.oozie.action.hadoop.HCatCredentials,hive2=org.apache.oozie.action.hadoop.Hive2Credentials"
+      }
+    }],
+    "name": "OOZIE"
+  }, {
+    "components": [{
+      "identities": [{
+        "principal": {
+          "configuration": "hdfs-site/dfs.secondary.namenode.kerberos.principal",
+          "type": "service",
+          "local_username": "${hadoop-env/hdfs_user}",
+          "value": "nn/_HOST@${realm}"
+        },
+        "name": "secondary_namenode_nn",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${hadoop-env/hdfs_user}"
+          },
+          "file": "${keytab_dir}/nn.service.keytab",
+          "configuration": "hdfs-site/dfs.secondary.namenode.keytab.file",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }, {
+        "principal": {
+          "configuration": "hdfs-site/dfs.secondary.namenode.kerberos.internal.spnego.principal",
+          "type": "service",
+          "value": "HTTP/_HOST@${realm}"
+        },
+        "name": "/spnego"
+      }],
+      "name": "SECONDARY_NAMENODE"
+    }, {
+      "identities": [{
+        "principal": {
+          "configuration": "hdfs-site/dfs.datanode.kerberos.principal",
+          "type": "service",
+          "local_username": "${hadoop-env/hdfs_user}",
+          "value": "dn/_HOST@${realm}"
+        },
+        "name": "datanode_dn",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${hadoop-env/hdfs_user}"
+          },
+          "file": "${keytab_dir}/dn.service.keytab",
+          "configuration": "hdfs-site/dfs.datanode.keytab.file",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }],
+      "configurations": [{
+        "hdfs-site": {
+          "dfs.datanode.address": "0.0.0.0:1019",
+          "dfs.datanode.http.address": "0.0.0.0:1022"
+        }
+      }],
+      "name": "DATANODE"
+    }, {
+      "identities": [{
+        "principal": {
+          "configuration": "hdfs-site/nfs.kerberos.principal",
+          "type": "service",
+          "local_username": "${hadoop-env/hdfs_user}",
+          "value": "nfs/_HOST@${realm}"
+        },
+        "name": "nfsgateway",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${hadoop-env/hdfs_user}"
+          },
+          "file": "${keytab_dir}/nfs.service.keytab",
+          "configuration": "hdfs-site/nfs.keytab.file",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }],
+      "name": "NFS_GATEWAY"
+    }, {
+      "identities": [{
+        "principal": {
+          "configuration": "hdfs-site/dfs.journalnode.kerberos.principal",
+          "type": "service",
+          "local_username": "${hadoop-env/hdfs_user}",
+          "value": "jn/_HOST@${realm}"
+        },
+        "name": "journalnode_jn",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${hadoop-env/hdfs_user}"
+          },
+          "file": "${keytab_dir}/jn.service.keytab",
+          "configuration": "hdfs-site/dfs.journalnode.keytab.file",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }, {
+        "principal": {
+          "configuration": "hdfs-site/dfs.journalnode.kerberos.internal.spnego.principal",
+          "type": "service",
+          "value": "HTTP/_HOST@${realm}"
+        },
+        "name": "/spnego"
+      }],
+      "name": "JOURNALNODE"
+    }, {
+      "identities": [{
+        "principal": {
+          "configuration": "hdfs-site/dfs.namenode.kerberos.principal",
+          "type": "service",
+          "local_username": "${hadoop-env/hdfs_user}",
+          "value": "nn/_HOST@${realm}"
+        },
+        "name": "namenode_nn",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${hadoop-env/hdfs_user}"
+          },
+          "file": "${keytab_dir}/nn.service.keytab",
+          "configuration": "hdfs-site/dfs.namenode.keytab.file",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }, {
+        "principal": {
+          "configuration": "hdfs-site/dfs.namenode.kerberos.internal.spnego.principal",
+          "type": "service",
+          "value": "HTTP/_HOST@${realm}"
+        },
+        "name": "/spnego"
+      }],
+      "configurations": [{
+        "hdfs-site": {
+          "dfs.block.access.token.enable": "true"
+        }
+      }],
+      "name": "NAMENODE"
+    }],
+    "identities": [{
+      "principal": {
+        "configuration": "hdfs-site/dfs.web.authentication.kerberos.principal",
+        "type": "service",
+        "value": "HTTP/_HOST@${realm}"
+      },
+      "name": "/spnego",
+      "keytab": {
+        "owner": {},
+        "file": "${keytab_dir}/spnego.service.keytab",
+        "configuration": "hdfs-site/dfs.web.authentication.kerberos.keytab",
+        "group": {}
+      }
+    }, {
+      "name": "/smokeuser"
+    }, {
+      "principal": {
+        "configuration": "hadoop-env/hdfs_principal_name",
+        "type": "user",
+        "local_username": "${hadoop-env/hdfs_user}",
+        "value": "${hadoop-env/hdfs_user}-${cluster_name}@${realm}"
+      },
+      "name": "hdfs",
+      "keytab": {
+        "owner": {
+          "access": "r",
+          "name": "${hadoop-env/hdfs_user}"
+        },
+        "file": "${keytab_dir}/hdfs.headless.keytab",
+        "configuration": "hadoop-env/hdfs_user_keytab",
+        "group": {
+          "access": "r",
+          "name": "${cluster-env/user_group}"
+        }
+      }
+    }],
+    "auth_to_local_properties": [
+      "core-site/hadoop.security.auth_to_local"
+    ],
+    "configurations": [{
+      "core-site": {
+        "hadoop.security.authorization": "true",
+        "hadoop.security.authentication": "kerberos",
+        "hadoop.proxyuser.HTTP.groups": "${hadoop-env/proxyuser_group}"
+      }
+    }],
+    "name": "HDFS"
+  }, {
+    "components": [{
+      "configurations": [{
+        "tez-site": {
+          "tez.am.view-acls": ""
+        }
+      }],
+      "name": "TEZ_CLIENT"
+    }],
+    "name": "TEZ"
+  }, {
+    "components": [{
+      "name": "SPARK_CLIENT"
+    }, {
+      "name": "SPARK_JOBHISTORYSERVER"
+    }],
+    "identities": [{
+      "name": "/smokeuser"
+    }, {
+      "name": "/HDFS/hdfs"
+    }, {
+      "principal": {
+        "configuration": "spark-defaults/spark.history.kerberos.principal",
+        "type": "user",
+        "local_username": "${spark-env/spark_user}",
+        "value": "${spark-env/spark_user}-${cluster_name}@${realm}"
+      },
+      "name": "sparkuser",
+      "keytab": {
+        "owner": {
+          "access": "r",
+          "name": "${spark-env/spark_user}"
+        },
+        "file": "${keytab_dir}/spark.headless.keytab",
+        "configuration": "spark-defaults/spark.history.kerberos.keytab",
+        "group": {
+          "access": "",
+          "name": "${cluster-env/user_group}"
+        }
+      }
+    }],
+    "configurations": [{
+      "spark-defaults": {
+        "spark.history.kerberos.enabled": "true"
+      }
+    }],
+    "name": "SPARK"
+  }, {
+    "components": [{
+      "name": "ACCUMULO_MASTER"
+    }, {
+      "name": "ACCUMULO_MONITOR"
+    }, {
+      "name": "ACCUMULO_CLIENT"
+    }, {
+      "name": "ACCUMULO_TRACER"
+    }, {
+      "name": "ACCUMULO_TSERVER"
+    }, {
+      "name": "ACCUMULO_GC"
+    }],
+    "identities": [{
+      "principal": {
+        "configuration": "accumulo-env/accumulo_principal_name",
+        "type": "user",
+        "local_username": "${accumulo-env/accumulo_user}",
+        "value": "${accumulo-env/accumulo_user}-${cluster_name}@${realm}"
+      },
+      "name": "accumulo",
+      "keytab": {
+        "owner": {
+          "access": "r",
+          "name": "${accumulo-env/accumulo_user}"
+        },
+        "file": "${keytab_dir}/accumulo.headless.keytab",
+        "configuration": "accumulo-env/accumulo_user_keytab",
+        "group": {
+          "access": "r",
+          "name": "${cluster-env/user_group}"
+        }
+      }
+    }, {
+      "principal": {
+        "configuration": "accumulo-site/general.kerberos.principal",
+        "type": "service",
+        "local_username": "${accumulo-env/accumulo_user}",
+        "value": "${accumulo-env/accumulo_user}/_HOST@${realm}"
+      },
+      "name": "accumulo_service",
+      "keytab": {
+        "owner": {
+          "access": "r",
+          "name": "${accumulo-env/accumulo_user}"
+        },
+        "file": "${keytab_dir}/accumulo.service.keytab",
+        "configuration": "accumulo-site/general.kerberos.keytab",
+        "group": {
+          "access": "",
+          "name": "${cluster-env/user_group}"
+        }
+      }
+    }, {
+      "principal": {
+        "configuration": "accumulo-site/trace.user",
+        "type": "user",
+        "local_username": "${accumulo-env/accumulo_user}",
+        "value": "tracer-${cluster_name}@${realm}"
+      },
+      "name": "accumulo_tracer",
+      "keytab": {
+        "owner": {
+          "access": "r",
+          "name": "${accumulo-env/accumulo_user}"
+        },
+        "file": "${keytab_dir}/accumulo-tracer.headless.keytab",
+        "configuration": "accumulo-site/trace.token.property.keytab",
+        "group": {
+          "access": "",
+          "name": "${cluster-env/user_group}"
+        }
+      }
+    }, {
+      "name": "/HDFS/hdfs"
+    }, {
+      "name": "/smokeuser"
+    }],
+    "configurations": [{
+      "accumulo-site": {
+        "instance.security.authenticator": "org.apache.accumulo.server.security.handler.KerberosAuthenticator",
+        "instance.rpc.sasl.enabled": "true",
+        "general.delegation.token.lifetime": "7d",
+        "trace.token.type": "org.apache.accumulo.core.client.security.tokens.KerberosToken",
+        "instance.security.permissionHandler": "org.apache.accumulo.server.security.handler.KerberosPermissionHandler",
+        "general.delegation.token.update.interval": "1d",
+        "instance.security.authorizor": "org.apache.accumulo.server.security.handler.KerberosAuthorizor"
+      }
+    }],
+    "name": "ACCUMULO"
+  }, {
+    "components": [{
+      "identities": [{
+        "principal": {
+          "configuration": "zookeeper-env/zookeeper_principal_name",
+          "type": "service",
+          "value": "zookeeper/_HOST@${realm}"
+        },
+        "name": "zookeeper_zk",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${zookeeper-env/zk_user}"
+          },
+          "file": "${keytab_dir}/zk.service.keytab",
+          "configuration": "zookeeper-env/zookeeper_keytab_path",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }],
+      "name": "ZOOKEEPER_SERVER"
+    }],
+    "identities": [{
+      "name": "/smokeuser"
+    }],
+    "name": "ZOOKEEPER"
+  }, {
+    "components": [{
+      "identities": [{
+        "principal": {
+          "configuration": "hbase-site/hbase.regionserver.kerberos.principal",
+          "type": "service",
+          "local_username": "${hbase-env/hbase_user}",
+          "value": "hbase/_HOST@${realm}"
+        },
+        "name": "hbase_regionserver_hbase",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${hbase-env/hbase_user}"
+          },
+          "file": "${keytab_dir}/hbase.service.keytab",
+          "configuration": "hbase-site/hbase.regionserver.keytab.file",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }],
+      "name": "HBASE_REGIONSERVER"
+    }, {
+      "identities": [{
+        "principal": {
+          "configuration": "hbase-site/hbase.master.kerberos.principal",
+          "type": "service",
+          "local_username": "${hbase-env/hbase_user}",
+          "value": "hbase/_HOST@${realm}"
+        },
+        "name": "hbase_master_hbase",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${hbase-env/hbase_user}"
+          },
+          "file": "${keytab_dir}/hbase.service.keytab",
+          "configuration": "hbase-site/hbase.master.keytab.file",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }],
+      "name": "HBASE_MASTER"
+    }, {
+      "identities": [{
+        "principal": {
+          "configuration": "hbase-site/phoenix.queryserver.kerberos.principal",
+          "type": "service",
+          "local_username": "${hbase-env/hbase_user}",
+          "value": "hbase/_HOST@${realm}"
+        },
+        "name": "hbase_queryserver_hbase",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${hbase-env/hbase_user}"
+          },
+          "file": "${keytab_dir}/hbase.service.keytab",
+          "configuration": "hbase-site/phoenix.queryserver.keytab.file",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }],
+      "name": "PHOENIX_QUERY_SERVER"
+    }],
+    "identities": [{
+      "name": "/spnego"
+    }, {
+      "name": "/HDFS/hdfs"
+    }, {
+      "principal": {
+        "configuration": "hbase-env/hbase_principal_name",
+        "type": "user",
+        "local_username": "${hbase-env/hbase_user}",
+        "value": "${hbase-env/hbase_user}-${cluster_name}@${realm}"
+      },
+      "name": "hbase",
+      "keytab": {
+        "owner": {
+          "access": "r",
+          "name": "${hbase-env/hbase_user}"
+        },
+        "file": "${keytab_dir}/hbase.headless.keytab",
+        "configuration": "hbase-env/hbase_user_keytab",
+        "group": {
+          "access": "r",
+          "name": "${cluster-env/user_group}"
+        }
+      }
+    }, {
+      "name": "/smokeuser"
+    }],
+    "configurations": [{
+      "hbase-site": {
+        "hbase.coprocessor.master.classes": "{{hbase_coprocessor_master_classes}}",
+        "hbase.security.authentication": "kerberos",
+        "hbase.coprocessor.region.classes": "{{hbase_coprocessor_region_classes}}",
+        "hbase.security.authorization": "true",
+        "hbase.bulkload.staging.dir": "/apps/hbase/staging",
+        "zookeeper.znode.parent": "/hbase-secure"
+      }
+    }],
+    "name": "HBASE"
+  }, {
+    "components": [{
+      "name": "KERBEROS_CLIENT"
+    }],
+    "identities": [{
+      "name": "/smokeuser"
+    }],
+    "name": "KERBEROS"
+  }, {
+    "components": [{
+      "identities": [{
+        "principal": {
+          "configuration": "kms-site/hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.principal",
+          "type": "service"
+        },
+        "name": "/spnego",
+        "keytab": {
+          "owner": {},
+          "configuration": "kms-site/hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.keytab",
+          "group": {}
+        }
+      }, {
+        "name": "/smokeuser"
+      }],
+      "name": "RANGER_KMS_SERVER"
+    }],
+    "identities": [{
+      "name": "/spnego",
+      "keytab": {
+        "owner": {},
+        "configuration": "kms-site/hadoop.kms.authentication.kerberos.keytab",
+        "group": {}
+      }
+    }, {
+      "name": "/smokeuser"
+    }],
+    "configurations": [{
+      "kms-site": {
+        "hadoop.kms.authentication.kerberos.principal": "*",
+        "hadoop.kms.authentication.type": "kerberos"
+      }
+    }],
+    "name": "RANGER_KMS"
+  }, {
+    "components": [{
+      "identities": [{
+        "principal": {
+          "configuration": "yarn-site/yarn.nodemanager.principal",
+          "type": "service",
+          "local_username": "${yarn-env/yarn_user}",
+          "value": "nm/_HOST@${realm}"
+        },
+        "name": "nodemanager_nm",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${yarn-env/yarn_user}"
+          },
+          "file": "${keytab_dir}/nm.service.keytab",
+          "configuration": "yarn-site/yarn.nodemanager.keytab",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }, {
+        "principal": {
+          "configuration": "yarn-site/yarn.nodemanager.webapp.spnego-principal",
+          "type": "service",
+          "value": "HTTP/_HOST@${realm}"
+        },
+        "name": "/spnego",
+        "keytab": {
+          "owner": {},
+          "file": "${keytab_dir}/spnego.service.keytab",
+          "configuration": "yarn-site/yarn.nodemanager.webapp.spnego-keytab-file",
+          "group": {}
+        }
+      }],
+      "configurations": [{
+        "yarn-site": {
+          "yarn.nodemanager.container-executor.class": "org.apache.hadoop.yarn.server.nodemanager.LinuxContainerExecutor"
+        }
+      }],
+      "name": "NODEMANAGER"
+    }, {
+      "identities": [{
+        "principal": {
+          "configuration": "yarn-site/yarn.timeline-service.principal",
+          "type": "service",
+          "local_username": "${yarn-env/yarn_user}",
+          "value": "yarn/_HOST@${realm}"
+        },
+        "name": "app_timeline_server_yarn",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${yarn-env/yarn_user}"
+          },
+          "file": "${keytab_dir}/yarn.service.keytab",
+          "configuration": "yarn-site/yarn.timeline-service.keytab",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }, {
+        "principal": {
+          "configuration": "yarn-site/yarn.timeline-service.http-authentication.kerberos.principal",
+          "type": "service",
+          "value": "HTTP/_HOST@${realm}"
+        },
+        "name": "/spnego",
+        "keytab": {
+          "owner": {},
+          "file": "${keytab_dir}/spnego.service.keytab",
+          "configuration": "yarn-site/yarn.timeline-service.http-authentication.kerberos.keytab",
+          "group": {}
+        }
+      }],
+      "name": "APP_TIMELINE_SERVER"
+    }, {
+      "identities": [{
+        "principal": {
+          "configuration": "yarn-site/yarn.resourcemanager.principal",
+          "type": "service",
+          "local_username": "${yarn-env/yarn_user}",
+          "value": "rm/_HOST@${realm}"
+        },
+        "name": "resource_manager_rm",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${yarn-env/yarn_user}"
+          },
+          "file": "${keytab_dir}/rm.service.keytab",
+          "configuration": "yarn-site/yarn.resourcemanager.keytab",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }, {
+        "principal": {
+          "configuration": "yarn-site/yarn.resourcemanager.webapp.spnego-principal",
+          "type": "service",
+          "value": "HTTP/_HOST@${realm}"
+        },
+        "name": "/spnego",
+        "keytab": {
+          "owner": {},
+          "file": "${keytab_dir}/spnego.service.keytab",
+          "configuration": "yarn-site/yarn.resourcemanager.webapp.spnego-keytab-file",
+          "group": {}
+        }
+      }],
+      "name": "RESOURCEMANAGER"
+    }],
+    "identities": [{
+      "name": "/spnego"
+    }, {
+      "name": "/HDFS/hdfs"
+    }, {
+      "name": "/smokeuser"
+    }],
+    "configurations": [{
+      "capacity-scheduler": {
+        "yarn.scheduler.capacity.root.default.acl_administer_queue": "${yarn-env/yarn_user}",
+        "yarn.scheduler.capacity.root.acl_administer_queue": "${yarn-env/yarn_user}",
+        "yarn.scheduler.capacity.root.default.acl_administer_jobs": "${yarn-env/yarn_user}",
+        "yarn.scheduler.capacity.root.acl_administer_jobs": "${yarn-env/yarn_user}",
+        "yarn.scheduler.capacity.root.default.acl_submit_applications": "${yarn-env/yarn_user}"
+      }
+    }, {
+      "yarn-site": {
+        "yarn.timeline-service.http-authentication.signer.secret.provider.object": "",
+        "yarn.resourcemanager.proxyusers.*.users": "",
+        "yarn.timeline-service.http-authentication.token.validity": "",
+        "yarn.admin.acl": "${yarn-env/yarn_user},dr.who",
+        "yarn.timeline-service.http-authentication.kerberos.name.rules": "",
+        "yarn.timeline-service.http-authentication.cookie.path": "",
+        "yarn.timeline-service.http-authentication.type": "kerberos",
+        "yarn.nodemanager.linux-container-executor.cgroups.mount-path": "",
+        "yarn.resourcemanager.proxy-user-privileges.enabled": "true",
+        "yarn.acl.enable": "true",
+        "yarn.timeline-service.http-authentication.signer.secret.provider": "",
+        "yarn.timeline-service.http-authentication.proxyusers.*.groups": "",
+        "yarn.timeline-service.http-authentication.proxyusers.*.hosts": "",
+        "yarn.timeline-service.http-authentication.signature.secret": "",
+        "yarn.timeline-service.http-authentication.signature.secret.file": "",
+        "yarn.resourcemanager.proxyusers.*.hosts": "",
+        "yarn.resourcemanager.proxyusers.*.groups": "",
+        "yarn.timeline-service.enabled": "true",
+        "yarn.timeline-service.http-authentication.proxyusers.*.users": "",
+        "yarn.timeline-service.http-authentication.cookie.domain": ""
+      }
+    }, {
+      "core-site": {
+        "hadoop.proxyuser.yarn.groups": "*",
+        "hadoop.proxyuser.yarn.hosts": "${yarn-site/yarn.resourcemanager.hostname}"
+      }
+    }],
+    "name": "YARN"
+  }, {
+    "components": [{
+      "identities": [{
+        "principal": {
+          "configuration": "knox-env/knox_principal_name",
+          "type": "service",
+          "local_username": "${knox-env/knox_user}",
+          "value": "${knox-env/knox_user}/_HOST@${realm}"
+        },
+        "name": "knox_principal",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${knox-env/knox_user}"
+          },
+          "file": "${keytab_dir}/knox.service.keytab",
+          "configuration": "knox-env/knox_keytab_path",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }],
+      "configurations": [{
+        "oozie-site": {
+          "oozie.service.ProxyUserService.proxyuser.${knox-env/knox_user}.groups": "${hadoop-env/proxyuser_group}",
+          "oozie.service.ProxyUserService.proxyuser.${knox-env/knox_user}.hosts": "${clusterHostInfo/knox_gateway_hosts}"
+        }
+      }, {
+        "webhcat-site": {
+          "webhcat.proxyuser.${knox-env/knox_user}.groups": "${hadoop-env/proxyuser_group}",
+          "webhcat.proxyuser.${knox-env/knox_user}.hosts": "${clusterHostInfo/knox_gateway_hosts}"
+        }
+      }, {
+        "gateway-site": {
+          "gateway.hadoop.kerberos.secured": "true",
+          "java.security.krb5.conf": "/etc/krb5.conf"
+        }
+      }, {
+        "core-site": {
+          "hadoop.proxyuser.${knox-env/knox_user}.hosts": "${clusterHostInfo/knox_gateway_hosts}",
+          "hadoop.proxyuser.${knox-env/knox_user}.groups": "${hadoop-env/proxyuser_group}"
+        }
+      }],
+      "name": "KNOX_GATEWAY"
+    }],
+    "name": "KNOX"
+  }, {
+    "components": [{
+      "identities": [{
+        "principal": {
+          "configuration": "storm-env/storm_ui_principal_name",
+          "type": "service"
+        },
+        "name": "/spnego",
+        "keytab": {
+          "owner": {},
+          "configuration": "storm-env/storm_ui_keytab",
+          "group": {}
+        }
+      }],
+      "name": "STORM_UI_SERVER"
+    }, {
+      "name": "SUPERVISOR"
+    }, {
+      "identities": [{
+        "principal": {
+          "configuration": "storm-env/nimbus_principal_name",
+          "type": "service",
+          "value": "nimbus/_HOST@${realm}"
+        },
+        "name": "nimbus_server",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${storm-env/storm_user}"
+          },
+          "file": "${keytab_dir}/nimbus.service.keytab",
+          "configuration": "storm-env/nimbus_keytab",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }],
+      "name": "NIMBUS"
+    }, {
+      "identities": [{
+        "principal": {
+          "configuration": "storm-env/nimbus_principal_name",
+          "type": "service",
+          "value": "nimbus/_HOST@${realm}"
+        },
+        "name": "nimbus_server",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${storm-env/storm_user}"
+          },
+          "file": "${keytab_dir}/nimbus.service.keytab",
+          "configuration": "storm-env/nimbus_keytab",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }],
+      "name": "DRPC_SERVER"
+    }],
+    "identities": [{
+      "name": "/spnego"
+    }, {
+      "name": "/smokeuser"
+    }, {
+      "principal": {
+        "configuration": "storm-env/storm_principal_name",
+        "type": "user",
+        "value": "${storm-env/storm_user}-${cluster_name}@${realm}"
+      },
+      "name": "storm_components",
+      "keytab": {
+        "owner": {
+          "access": "r",
+          "name": "${storm-env/storm_user}"
+        },
+        "file": "${keytab_dir}/storm.headless.keytab",
+        "configuration": "storm-env/storm_keytab",
+        "group": {
+          "access": "",
+          "name": "${cluster-env/user_group}"
+        }
+      }
+    }],
+    "configurations": [{
+      "storm-site": {
+        "nimbus.authorizer": "backtype.storm.security.auth.authorizer.SimpleACLAuthorizer",
+        "java.security.auth.login.config": "{{conf_dir}}/storm_jaas.conf",
+        "drpc.authorizer": "backtype.storm.security.auth.authorizer.DRPCSimpleACLAuthorizer",
+        "storm.principal.tolocal": "backtype.storm.security.auth.KerberosPrincipalToLocal",
+        "storm.zookeeper.superACL": "sasl:{{storm_bare_jaas_principal}}",
+        "ui.filter.params": "{'type': 'kerberos', 'kerberos.principal': '{{storm_ui_jaas_principal}}', 'kerberos.keytab': '{{storm_ui_keytab_path}}', 'kerberos.name.rules': 'DEFAULT'}",
+        "nimbus.supervisor.users": "['{{storm_bare_jaas_principal}}']",
+        "nimbus.admins": "['{{storm_bare_jaas_principal}}']",
+        "ui.filter": "org.apache.hadoop.security.authentication.server.AuthenticationFilter",
+        "supervisor.enable": "true"
+      }
+    }],
+    "name": "STORM"
+  }, {
+    "components": [{
+      "identities": [{
+        "principal": {
+          "configuration": "application-properties/atlas.authentication.principal",
+          "type": "service",
+          "local_username": "${atlas-env/metadata_user}",
+          "value": "atlas/_HOST@${realm}"
+        },
+        "name": "atlas",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${atlas-env/metadata_user}"
+          },
+          "file": "${keytab_dir}/atlas.service.keytab",
+          "configuration": "application-properties/atlas.authentication.keytab",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }, {
+        "principal": {
+          "configuration": "application-properties/atlas.http.authentication.kerberos.principal",
+          "type": "service",
+          "value": "HTTP/_HOST@${realm}"
+        },
+        "name": "/spnego",
+        "keytab": {
+          "owner": {},
+          "configuration": "application-properties/atlas.http.authentication.kerberos.keytab",
+          "group": {}
+        }
+      }],
+      "name": "ATLAS_SERVER"
+    }],
+    "auth_to_local_properties": [
+      "application-properties/atlas.http.authentication.kerberos.name.rules"
+    ],
+    "configurations": [{
+      "application-properties": {
+        "atlas.authentication.method": "kerberos",
+        "atlas.http.authentication.enabled": "true",
+        "atlas.http.authentication.type": "kerberos"
+      }
+    }],
+    "name": "ATLAS"
+  }, {
+    "components": [{
+      "identities": [{
+        "principal": {
+          "configuration": "hive-site/hive.server2.authentication.kerberos.principal",
+          "type": "service",
+          "local_username": "${hive-env/hive_user}",
+          "value": "hive/_HOST@${realm}"
+        },
+        "name": "hive_server_hive",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${hive-env/hive_user}"
+          },
+          "file": "${keytab_dir}/hive.service.keytab",
+          "configuration": "hive-site/hive.server2.authentication.kerberos.keytab",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }, {
+        "principal": {
+          "configuration": "hive-site/hive.server2.authentication.spnego.principal",
+          "type": "service"
+        },
+        "name": "/spnego",
+        "keytab": {
+          "owner": {},
+          "configuration": "hive-site/hive.server2.authentication.spnego.keytab",
+          "group": {}
+        }
+      }],
+      "name": "HIVE_SERVER"
+    }, {
+      "identities": [{
+        "principal": {
+          "configuration": "hive-site/hive.metastore.kerberos.principal",
+          "type": "service",
+          "local_username": "${hive-env/hive_user}",
+          "value": "hive/_HOST@${realm}"
+        },
+        "name": "hive_metastore_hive",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${hive-env/hive_user}"
+          },
+          "file": "${keytab_dir}/hive.service.keytab",
+          "configuration": "hive-site/hive.metastore.kerberos.keytab.file",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }],
+      "name": "HIVE_METASTORE"
+    }, {
+      "identities": [{
+        "principal": {
+          "configuration": "webhcat-site/templeton.kerberos.principal",
+          "type": "service"
+        },
+        "name": "/spnego",
+        "keytab": {
+          "owner": {},
+          "configuration": "webhcat-site/templeton.kerberos.keytab",
+          "group": {}
+        }
+      }],
+      "name": "WEBHCAT_SERVER"
+    }],
+    "identities": [{
+      "name": "/spnego"
+    }, {
+      "name": "/smokeuser"
+    }],
+    "configurations": [{
+      "hive-site": {
+        "hive.metastore.sasl.enabled": "true",
+        "hive.server2.authentication": "KERBEROS"
+      }
+    }, {
+      "webhcat-site": {
+        "templeton.hive.properties": "hive.metastore.local=false,hive.metastore.uris=${clusterHostInfo/hive_metastore_host|each(thrift://%s:9083, \\\\,, \\s*\\,\\s*)},hive.metastore.sasl.enabled=true,hive.metastore.execute.setugi=true,hive.metastore.warehouse.dir=/apps/hive/warehouse,hive.exec.mode.local.auto=false,hive.metastore.kerberos.principal=hive/_HOST@${realm}",
+        "templeton.kerberos.secret": "secret"
+      }
+    }, {
+      "core-site": {
+        "hadoop.proxyuser.HTTP.hosts": "${clusterHostInfo/webhcat_server_host}"
+      }
+    }],
+    "name": "HIVE"
+  }, {
+    "components": [{
+      "identities": [{
+        "principal": {
+          "configuration": "ams-hbase-security-site/hbase.master.kerberos.principal",
+          "type": "service",
+          "local_username": "${ams-env/ambari_metrics_user}",
+          "value": "amshbasemaster/_HOST@${realm}"
+        },
+        "name": "ams_hbase_master_hbase",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${ams-env/ambari_metrics_user}"
+          },
+          "file": "${keytab_dir}/ams-hbase.master.keytab",
+          "configuration": "ams-hbase-security-site/hbase.master.keytab.file",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }, {
+        "principal": {
+          "configuration": "ams-hbase-security-site/hbase.regionserver.kerberos.principal",
+          "type": "service",
+          "local_username": "${ams-env/ambari_metrics_user}",
+          "value": "amshbasers/_HOST@${realm}"
+        },
+        "name": "ams_hbase_regionserver_hbase",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${ams-env/ambari_metrics_user}"
+          },
+          "file": "${keytab_dir}/ams-hbase.regionserver.keytab",
+          "configuration": "ams-hbase-security-site/hbase.regionserver.keytab.file",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }, {
+        "principal": {
+          "configuration": "ams-hbase-security-site/hbase.myclient.principal",
+          "type": "service",
+          "local_username": "${ams-env/ambari_metrics_user}",
+          "value": "amshbase/_HOST@${realm}"
+        },
+        "name": "ams_collector",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${ams-env/ambari_metrics_user}"
+          },
+          "file": "${keytab_dir}/ams.collector.keytab",
+          "configuration": "ams-hbase-security-site/hbase.myclient.keytab",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }, {
+        "principal": {
+          "configuration": "ams-hbase-security-site/ams.zookeeper.principal",
+          "type": "service",
+          "local_username": "${ams-env/ambari_metrics_user}",
+          "value": "amszk/_HOST@${realm}"
+        },
+        "name": "ams_zookeeper",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${ams-env/ambari_metrics_user}"
+          },
+          "file": "${keytab_dir}/ams-zk.service.keytab",
+          "configuration": "ams-hbase-security-site/ams.zookeeper.keytab",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }],
+      "configurations": [{
+        "ams-hbase-security-site": {
+          "hbase.coprocessor.master.classes": "org.apache.hadoop.hbase.security.access.AccessController",
+          "hadoop.security.authentication": "kerberos",
+          "hbase.security.authentication": "kerberos",
+          "hbase.coprocessor.region.classes": "org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.AccessController",
+          "hbase.security.authorization": "true",
+          "zookeeper.znode.parent": "/ams-hbase-secure",
+          "hbase.zookeeper.property.kerberos.removeRealmFromPrincipal": "true",
+          "hbase.zookeeper.property.jaasLoginRenew": "3600000",
+          "hbase.zookeeper.property.authProvider.1": "org.apache.zookeeper.server.auth.SASLAuthenticationProvider",
+          "hbase.zookeeper.property.kerberos.removeHostFromPrincipal": "true"
+        }
+      }],
+      "name": "METRICS_COLLECTOR"
+    }],
+    "identities": [{
+      "name": "/spnego"
+    }],
+    "name": "AMBARI_METRICS"
+  }, {
+    "components": [{
+      "identities": [{
+        "principal": {
+          "configuration": "kafka-env/kafka_principal_name",
+          "type": "service",
+          "value": "${kafka-env/kafka_user}/_HOST@${realm}"
+        },
+        "name": "kafka_broker",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${kafka-env/kafka_user}"
+          },
+          "file": "${keytab_dir}/kafka.service.keytab",
+          "configuration": "kafka-env/kafka_keytab",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }],
+      "name": "KAFKA_BROKER"
+    }],
+    "identities": [{
+      "name": "/smokeuser"
+    }],
+    "configurations": [{
+      "kafka-broker": {
+        "principal.to.local.class": "kafka.security.auth.KerberosPrincipalToLocal",
+        "authorizer.class.name": "kafka.security.auth.SimpleAclAuthorizer",
+        "super.users": "user:${kafka-env/kafka_user}",
+        "security.inter.broker.protocol": "PLAINTEXTSASL"
+      }
+    }],
+    "name": "KAFKA"
+  }, {
+    "components": [{
+      "identities": [{
+        "principal": {
+          "configuration": "falcon-startup.properties/*.falcon.service.authentication.kerberos.principal",
+          "type": "service",
+          "local_username": "${falcon-env/falcon_user}",
+          "value": "falcon/_HOST@${realm}"
+        },
+        "name": "falcon_server",
+        "keytab": {
+          "owner": {
+            "access": "r",
+            "name": "${falcon-env/falcon_user}"
+          },
+          "file": "${keytab_dir}/falcon.service.keytab",
+          "configuration": "falcon-startup.properties/*.falcon.service.authentication.kerberos.keytab",
+          "group": {
+            "access": "",
+            "name": "${cluster-env/user_group}"
+          }
+        }
+      }, {
+        "principal": {
+          "configuration": "falcon-startup.properties/*.falcon.http.authentication.kerberos.principal",
+          "type": "service",
+          "value": "HTTP/_HOST@${realm}"
+        },
+        "name": "/spnego",
+        "keytab": {
+          "owner": {},
+          "configuration": "falcon-startup.properties/*.falcon.http.authentication.kerberos.keytab",
+          "group": {}
+        }
+      }],
+      "name": "FALCON_SERVER"
+    }],
+    "identities": [{
+      "name": "/spnego"
+    }, {
+      "name": "/smokeuser"
+    }, {
+      "name": "/HDFS/hdfs"
+    }],
+    "auth_to_local_properties": [
+      "falcon-startup.properties/*.falcon.http.authentication.kerberos.name.rules|new_lines_escaped"
+    ],
+    "configurations": [{
+      "falcon-startup.properties": {
+        "*.dfs.namenode.kerberos.principal": "nn/_HOST@${realm}",
+        "*.falcon.http.authentication.type": "kerberos",
+        "*.falcon.authentication.type": "kerberos"
+      }
+    }],
+    "name": "FALCON"
+  }],
+  "properties": {
+    "additional_realms": "",
+    "keytab_dir": "/etc/security/keytabs",
+    "realm": "EXAMPLE.COM"
+  }
+}
\ No newline at end of file


[04/50] ambari git commit: AMBARI-14083. hadoop.proxyuser.hcat.hosts not updated when adding webhcat (onechiporenko)

Posted by nc...@apache.org.
AMBARI-14083. hadoop.proxyuser.hcat.hosts not updated when adding webhcat (onechiporenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7312dbf1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7312dbf1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7312dbf1

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 7312dbf1ef5f37c7c981dce40adbbf25d8efcbeb
Parents: b19bf6e
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Fri Nov 27 11:53:34 2015 +0200
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Fri Nov 27 11:58:47 2015 +0200

----------------------------------------------------------------------
 ambari-web/app/controllers/main/host/details.js | 81 ++++++++++++--------
 ambari-web/app/messages.js                      |  2 +
 .../main/host/details/deleteComponentPopup.hbs  | 19 +++--
 .../test/controllers/main/host/details_test.js  | 60 +++++++++++++--
 4 files changed, 113 insertions(+), 49 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7312dbf1/ambari-web/app/controllers/main/host/details.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/host/details.js b/ambari-web/app/controllers/main/host/details.js
index 2b7ae21..c3713fc 100644
--- a/ambari-web/app/controllers/main/host/details.js
+++ b/ambari-web/app/controllers/main/host/details.js
@@ -315,42 +315,27 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
       bodyClass: Em.View.extend({
         templateName: require('templates/main/host/details/deleteComponentPopup')
       }),
-      isHiveMetastore: function () {
-        return componentName == 'HIVE_METASTORE';
-      }.property(),
-      deleteHiveMetastoreMsg: Em.View.extend({
-        template: Em.Handlebars.compile(Em.I18n.t('hosts.host.deleteComponent.popup.deleteHiveMetastore'))
-      }),
-      isNimbus: function () {
-        return componentName == 'NIMBUS';
-      }.property(),
-      deleteNimbusMsg: Em.View.extend({
-        template: Em.Handlebars.compile(Em.I18n.t('hosts.host.deleteComponent.popup.deleteNimbus'))
-      }),
-      isRangerKMSServer: function () {
-        return componentName == 'RANGER_KMS_SERVER';
-      }.property(),
-      deleteRangerKMSServereMsg: Em.View.extend({
-        template: Em.Handlebars.compile(Em.I18n.t('hosts.host.deleteComponent.popup.deleteRangerKMSServer'))
-      }),
+      isHiveMetastore: componentName == 'HIVE_METASTORE',
+      isWebHCatServer: componentName == 'WEBHCAT_SERVER',
+      isNimbus: componentName == 'NIMBUS',
+      isRangerKMSServer: componentName == 'RANGER_KMS_SERVER',
+      isZkServer: componentName == 'ZOOKEEPER_SERVER',
+
+      deleteHiveMetastoreMsg: Em.I18n.t('hosts.host.deleteComponent.popup.deleteHiveMetastore'),
+      deleteWebHCatServerMsg: Em.I18n.t('hosts.host.deleteComponent.popup.deleteWebHCatServer'),
+      deleteNimbusMsg: Em.I18n.t('hosts.host.deleteComponent.popup.deleteNimbus'),
+      deleteRangerKMSServereMsg: Em.I18n.t('hosts.host.deleteComponent.popup.deleteRangerKMSServer'),
+      lastComponentError: Em.I18n.t('hosts.host.deleteComponent.popup.warning').format(displayName),
+      deleteComponentMsg: Em.I18n.t('hosts.host.deleteComponent.popup.msg1').format(displayName),
+      deleteZkServerMsg: Em.I18n.t('hosts.host.deleteComponent.popup.deleteZooKeeperServer'),
+
       isChecked: false,
       disablePrimary: Em.computed.not('isChecked'),
       lastComponent: function () {
         this.set('isChecked', !isLastComponent);
         return isLastComponent;
       }.property(),
-      isZkServer: function () {
-        return componentName == 'ZOOKEEPER_SERVER';
-      }.property(),
-      lastComponentError: Em.View.extend({
-        template: Em.Handlebars.compile(Em.I18n.t('hosts.host.deleteComponent.popup.warning').format(displayName))
-      }),
-      deleteComponentMsg: function () {
-        return Em.I18n.t('hosts.host.deleteComponent.popup.msg1').format(displayName);
-      }.property(),
-      deleteZkServerMsg: Em.View.extend({
-        template: Em.Handlebars.compile(Em.I18n.t('hosts.host.deleteComponent.popup.deleteZooKeeperServer'))
-      }),
+
       onPrimary: function () {
         var popup = this;
         self._doDeleteHostComponent(component, function () {
@@ -431,6 +416,9 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
     } else if (data.componentName == 'HIVE_METASTORE') {
       this.set('deleteHiveMetaStore', true);
       this.loadConfigs('loadHiveConfigs');
+    } else if (data.componentName == 'WEBHCAT_SERVER') {
+      this.set('deleteWebHCatServer', true);
+      this.loadConfigs('loadHiveConfigs');
     } else if (data.componentName == 'HIVE_SERVER') {
       this.set('deleteHiveServer', true);
       this.loadConfigs('loadHiveConfigs');
@@ -586,6 +574,12 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
           self.loadConfigs("loadHiveConfigs");
         }, Em.I18n.t('hosts.host.addComponent.' + componentName) + manualKerberosWarning);
         break;
+      case 'WEBHCAT_SERVER':
+        returnFunc = App.showConfirmationPopup(function () {
+          self.set('webhcatServerHost', hostName);
+          self.loadConfigs("loadHiveConfigs");
+        }, Em.I18n.t('hosts.host.addComponent.' + componentName) + manualKerberosWarning);
+        break;
       case 'NIMBUS':
         returnFunc = App.showConfirmationPopup(function () {
           self.set('nimbusHost', hostName);
@@ -835,6 +829,7 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
   onLoadHiveConfigs: function (data) {
     var
       hiveMetastoreHost = this.get('hiveMetastoreHost'),
+      webhcatServerHost = this.get('webhcatServerHost'),
       hiveMSHosts = this.getHiveHosts(),
       hiveMasterHosts = hiveMSHosts.concat(App.HostComponent.find().filterProperty('componentName', 'HIVE_SERVER').mapProperty('hostName')).uniq().sort().join(','),
       configs = {},
@@ -883,7 +878,11 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
         }
       }
     ];
-    this.saveConfigsBatch(groups, this.get('addHiveServer') ? 'HIVE_SERVER' : 'HIVE_METASTORE', hiveMetastoreHost);
+    var params = [groups];
+    var componentName = this.get('addHiveServer') ? 'HIVE_SERVER' : (hiveMetastoreHost ? 'HIVE_METASTORE' : 'WEBHCAT_SERVER');
+    var host = webhcatServerHost || hiveMetastoreHost;
+    params.pushObjects([componentName, host]);
+    this.saveConfigsBatch.apply(this, params);
     this.set('addHiveServer', false);
   },
 
@@ -946,21 +945,35 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
    */
   deleteHiveMetaStore: false,
 
+  /**
+   * Delete WebHCat Server is performed
+   *
+   * @type {bool}
+   */
+  deleteWebHCatServer: false,
+
   getHiveHosts: function () {
     var
       hiveHosts = App.HostComponent.find().filterProperty('componentName', 'HIVE_METASTORE').mapProperty('hostName'),
-      hiveMetastoreHost = this.get('hiveMetastoreHost');
+      hiveMetastoreHost = this.get('hiveMetastoreHost'),
+      webhcatServerHost = this.get('webhcatServerHost');
 
     if (!!hiveMetastoreHost) {
       hiveHosts.push(hiveMetastoreHost);
       this.set('hiveMetastoreHost', '');
     }
 
-    if (this.get('fromDeleteHost') || this.get('deleteHiveMetaStore') || this.get('deleteHiveServer')) {
+    if (!!webhcatServerHost) {
+      hiveHosts.push(webhcatServerHost);
+      this.set('webhcatServerHost' ,'');
+    }
+
+    if (this.get('fromDeleteHost') || this.get('deleteHiveMetaStore') || this.get('deleteHiveServer') || this.get('deleteWebHCatServer')) {
       this.set('deleteHiveMetaStore', false);
       this.set('deleteHiveServer', false);
+      this.set('deleteWebHCatServer', false);
       this.set('fromDeleteHost', false);
-      return hiveHosts.without(this.get('content.hostName'));
+      hiveHosts = hiveHosts.without(this.get('content.hostName'));
     }
     return hiveHosts.sort();
   },

http://git-wip-us.apache.org/repos/asf/ambari/blob/7312dbf1/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 7c7ca57..502a7a2 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -2321,7 +2321,9 @@ Em.I18n.translations = {
   'hosts.host.decommissioned':'Decommissioned',
   'hosts.host.decommissioning':'Decommissioning',
   'hosts.host.addComponent.HIVE_METASTORE':'Adding <i>Hive Metastore</i> will reconfigure such properties:<ul><li>hive.metastore.uris</li><li>templeton.hive.properties</li></ul>',
+  'hosts.host.addComponent.WEBHCAT_SERVER':'Adding <i>WebHCat Server</i> will reconfigure such properties:<ul><li>hive.metastore.uris</li><li>templeton.hive.properties</li></ul>',
   'hosts.host.deleteComponent.popup.deleteHiveMetastore':'Deleting <i>Hive Metastore</i> will reconfigure such properties:<ul><li>hive.metastore.uris</li><li>templeton.hive.properties</li></ul>',
+  'hosts.host.deleteComponent.popup.deleteWebHCatServer':'Deleting <i>WebHCat Server</i> will reconfigure such properties:<ul><li>hive.metastore.uris</li><li>templeton.hive.properties</li></ul>',
   'hosts.host.configs.save.note': 'This configuration is created by ambari while installing/deleting {0} component on a host',
 
   'hosts.component.passive.implied.host.mode.tooltip':'Cannot Turn Off Maintenance Mode because Host is in Maintenance Mode',

http://git-wip-us.apache.org/repos/asf/ambari/blob/7312dbf1/ambari-web/app/templates/main/host/details/deleteComponentPopup.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/host/details/deleteComponentPopup.hbs b/ambari-web/app/templates/main/host/details/deleteComponentPopup.hbs
index d157aa9..4fd5c52 100644
--- a/ambari-web/app/templates/main/host/details/deleteComponentPopup.hbs
+++ b/ambari-web/app/templates/main/host/details/deleteComponentPopup.hbs
@@ -16,26 +16,25 @@
 * limitations under the License.
 }}
 
-{{deleteComponentMsg}}<br/><br/>
+<p>{{{deleteComponentMsg}}}</p>
 {{#if lastComponent}}
   <div class="alert-error row-fluid">
     <div class='tinyspan tinyoffset'>{{view Ember.Checkbox checkedBinding="isChecked"}}</div>
-    <div class='span10'>{{view lastComponentError}}</div>
+    <div class='span10'>{{{lastComponentError}}}</div>
   </div>
 {{/if}}
 {{#if isZkServer}}
-  <br/>
-  <div class='alert'>{{view deleteZkServerMsg}}</div>
+  <div class='alert'>{{{deleteZkServerMsg}}}</div>
 {{/if}}
 {{#if isHiveMetastore}}
-  <br/>
-  <div class='alert'>{{view deleteHiveMetastoreMsg}}</div>
+  <div class='alert'>{{{deleteHiveMetastoreMsg}}}</div>
+{{/if}}
+{{#if isWebHCatServer}}
+  <div class='alert'>{{{deleteWebHCatServerMsg}}}</div>
 {{/if}}
 {{#if isNimbus}}
-  <br/>
-  <div class='alert'>{{view deleteNimbusMsg}}</div>
+  <div class='alert'>{{{deleteNimbusMsg}}}</div>
 {{/if}}
 {{#if isRangerKMSServer}}
-  <br/>
-  <div class='alert'>{{view deleteRangerKMSServereMsg}}</div>
+  <div class='alert'>{{{deleteRangerKMSServereMsg}}}</div>
 {{/if}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/7312dbf1/ambari-web/test/controllers/main/host/details_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/host/details_test.js b/ambari-web/test/controllers/main/host/details_test.js
index 64d5c79..f0a2874 100644
--- a/ambari-web/test/controllers/main/host/details_test.js
+++ b/ambari-web/test/controllers/main/host/details_test.js
@@ -526,6 +526,15 @@ describe('App.MainHostDetailsController', function () {
       controller.addComponent(event);
       expect(App.showConfirmationPopup.calledOnce).to.be.true;
     });
+    it('add WEBHCAT_SERVER', function () {
+      var event = {
+        context: Em.Object.create({
+          componentName: 'WEBHCAT_SERVER'
+        })
+      };
+      controller.addComponent(event);
+      expect(App.showConfirmationPopup.calledOnce).to.be.true;
+    });
     it('add slave component', function () {
       var event = {
         context: Em.Object.create({
@@ -2969,7 +2978,8 @@ describe('App.MainHostDetailsController', function () {
         'input': {
           'hiveMetastoreHost': '',
           'fromDeleteHost': false,
-          'deleteHiveMetaStore': false
+          'deleteHiveMetaStore': false,
+          'deleteWebHCatServer': false
         },
         'hiveHosts': ['h1', 'h2'],
         'title': 'adding HiveServer2'
@@ -2978,17 +2988,29 @@ describe('App.MainHostDetailsController', function () {
         'input': {
           'hiveMetastoreHost': 'h0',
           'fromDeleteHost': false,
-          'deleteHiveMetaStore': false
+          'deleteHiveMetaStore': false,
+          'deleteWebHCatServer': false
         },
         'hiveHosts': ['h0', 'h1', 'h2'],
         'title': 'adding Hive Metastore'
       },
       {
         'input': {
+          'webhcatServerHost': 'h0',
+          'fromDeleteHost': false,
+          'deleteHiveMetaStore': false,
+          'deleteWebHCatServer': false
+        },
+        'hiveHosts': ['h0', 'h1', 'h2'],
+        'title': 'adding WebHCat Server'
+      },
+      {
+        'input': {
           'hiveMetastoreHost': '',
           'content.hostName': 'h1',
           'fromDeleteHost': false,
-          'deleteHiveMetaStore': true
+          'deleteHiveMetaStore': true,
+          'deleteWebHCatServer': false
         },
         'hiveHosts': ['h2'],
         'title': 'deleting Hive component'
@@ -2996,12 +3018,35 @@ describe('App.MainHostDetailsController', function () {
       {
         'input': {
           'hiveMetastoreHost': '',
+          'content.hostName': 'h4',
+          'fromDeleteHost': false,
+          'deleteHiveMetaStore': false,
+          'deleteWebHCatServer': true
+        },
+        'hiveHosts': ['h1', 'h2'],
+        'title': 'deleting WebHCat Server'
+      },
+      {
+        'input': {
+          'hiveMetastoreHost': '',
           'content.hostName': 'h2',
           'fromDeleteHost': true,
-          'deleteHiveMetaStore': false
+          'deleteHiveMetaStore': false,
+          'deleteWebHCatServer': false
         },
         'hiveHosts': ['h1'],
         'title': 'deleting host with Hive component'
+      },
+      {
+        'input': {
+          'webhcatServerHost': '',
+          'content.hostName': 'h2',
+          'fromDeleteHost': true,
+          'deleteHiveMetaStore': false,
+          'deleteWebHCatServer': false
+        },
+        'hiveHosts': ['h1'],
+        'title': 'deleting host with WebHCat Server'
       }
     ];
 
@@ -3018,6 +3063,10 @@ describe('App.MainHostDetailsController', function () {
         {
           componentName: 'HIVE_SERVER',
           hostName: 'h3'
+        },
+        {
+          componentName: 'WEBHCAT_SERVER',
+          hostName: 'h4'
         }
       ]);
     });
@@ -3031,8 +3080,9 @@ describe('App.MainHostDetailsController', function () {
         Em.keys(item.input).forEach(function (key) {
           controller.set(key, item.input[key]);
         });
-        expect(controller.getHiveHosts()).to.eql(item.hiveHosts);
+        expect(controller.getHiveHosts().toArray()).to.eql(item.hiveHosts);
         expect(controller.get('hiveMetastoreHost')).to.be.empty;
+        expect(controller.get('webhcatServerHost')).to.be.empty;
         expect(controller.get('fromDeleteHost')).to.be.false;
         expect(controller.get('deleteHiveMetaStore')).to.be.false;
       });


[25/50] ambari git commit: AMBARI-14100. RU and EU upgrade failed on first step (dlysnichenko)

Posted by nc...@apache.org.
AMBARI-14100. RU and EU upgrade failed on first step (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/93bf1d37
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/93bf1d37
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/93bf1d37

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 93bf1d370c6df7cfd50123c98aa0b8ac0582e962
Parents: c838f31
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Mon Nov 30 20:13:50 2015 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Mon Nov 30 20:14:50 2015 +0200

----------------------------------------------------------------------
 .../libraries/functions/conf_select.py              | 16 ++++++++++------
 1 file changed, 10 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/93bf1d37/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index 9ed5fb8..24ca151 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@ -28,7 +28,7 @@ import subprocess
 from resource_management.core import shell
 from resource_management.libraries.script.script import Script
 from resource_management.core.logger import Logger
-from resource_management.core.resources.system import Directory
+from resource_management.core.resources.system import Directory, Link
 
 PACKAGE_DIRS = {
   "accumulo": [
@@ -259,19 +259,23 @@ def select(stack_name, package, version, try_create=True):
           normalized_current_dir = (os.path.normpath(real_path_of_current_dir)).strip()
           Logger.info("Normalized Conf Dir : {0}, Normalized Current Dir : {1}".format(normalized_conf_dir, normalized_current_dir))
           if os.path.isdir(normalized_current_dir) and normalized_current_dir != normalized_conf_dir:
-            if not os.path.isdir(normalized_conf_dir) :
-              os.symlink(normalized_current_dir, normalized_conf_dir)
+            if not os.path.isdir(normalized_conf_dir) and not os.path.islink(normalized_conf_dir):
+              Link(normalized_conf_dir,
+                   to=normalized_current_dir)
               Logger.info("{0} directory doesn't exist. Created Symlink : {1} -> {2}".format(normalized_conf_dir, normalized_conf_dir, normalized_current_dir))
               return
             # In case, 'normalized_conf_dir' does have a symlink and it's not the one mentioned in 'PACKAGE_DIRS',
             # we remove the symlink and make it point to correct symlink.
-            if (os.path.islink(normalized_conf_dir) and os.readlink(normalized_conf_dir) != normalized_current_dir):
+            if os.path.islink(normalized_conf_dir) and os.readlink(normalized_conf_dir) != normalized_current_dir:
               Logger.info("{0} exists and points to incorrect path {1}".format(normalized_conf_dir, os.readlink(normalized_conf_dir)))
-              os.remove(normalized_conf_dir)
+              Link(normalized_conf_dir,
+                   action="delete")
               Logger.info("Removed existing symlink for {0}".format(normalized_conf_dir))
-              os.symlink(normalized_current_dir, normalized_conf_dir)
+              Link(normalized_conf_dir,
+                   to=normalized_current_dir)
               Logger.info("Created Symlink : {0} -> {1}".format(normalized_conf_dir, normalized_current_dir))
 
+
 def get_hadoop_conf_dir(force_latest_on_upgrade=False):
   """
   Gets the shared hadoop conf directory using:


[08/50] ambari git commit: AMBARI-14098 AMS auto start alert is present on cluster with HA after enabling MIT security (dsen)

Posted by nc...@apache.org.
AMBARI-14098 AMS auto start alert is present on cluster with HA after enabling MIT security (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ebdda3bc
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ebdda3bc
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ebdda3bc

Branch: refs/heads/branch-dev-patch-upgrade
Commit: ebdda3bcf051d31e1cb49204ee662e6480aa44bc
Parents: 667e1e8
Author: Dmytro Sen <ds...@apache.org>
Authored: Fri Nov 27 16:39:18 2015 +0200
Committer: Dmytro Sen <ds...@apache.org>
Committed: Fri Nov 27 16:39:18 2015 +0200

----------------------------------------------------------------------
 .../src/main/python/ambari_agent/alerts/recovery_alert.py        | 4 ++--
 .../resources/common-services/AMBARI_METRICS/0.1.0/alerts.json   | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ebdda3bc/ambari-agent/src/main/python/ambari_agent/alerts/recovery_alert.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/recovery_alert.py b/ambari-agent/src/main/python/ambari_agent/alerts/recovery_alert.py
index 66b4b24..60744b5 100644
--- a/ambari-agent/src/main/python/ambari_agent/alerts/recovery_alert.py
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/recovery_alert.py
@@ -24,8 +24,8 @@ from alerts.base_alert import BaseAlert
 logger = logging.getLogger()
 
 # default recoveries counts
-DEFAULT_WARNING_RECOVERIES_COUNT = 1
-DEFAULT_CRITICAL_RECOVERIES_COUNT = 5
+DEFAULT_WARNING_RECOVERIES_COUNT = 2
+DEFAULT_CRITICAL_RECOVERIES_COUNT = 4
 
 UNKNOWN_COMPONENT = 'UNKNOWN_COMPONENT'
 class RecoveryAlert(BaseAlert):

http://git-wip-us.apache.org/repos/asf/ambari/blob/ebdda3bc/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/alerts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/alerts.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/alerts.json
index 3f2fbfd..964403e 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/alerts.json
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/alerts.json
@@ -43,11 +43,11 @@
             },
             "warning": {
               "text": "Metrics Collector has been auto-started {1} times{0}.",
-              "count": 1
+              "count": 2
             },
             "critical": {
               "text": "Metrics Collector has been auto-started {1} times{0}.",
-              "count": 5
+              "count": 4
             }
           }
         }


[47/50] ambari git commit: AMBARI-14130 Ambari Web UI stuck with wire encryption enabled and no truststore setup. (atkach)

Posted by nc...@apache.org.
AMBARI-14130 Ambari Web UI stuck with wire encryption enabled and no truststore setup. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/730e6f47
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/730e6f47
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/730e6f47

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 730e6f47cd51a6d691e41371f432a0e7d9a02bbf
Parents: fd29b08
Author: Andrii Tkach <at...@hortonworks.com>
Authored: Tue Dec 1 14:14:58 2015 +0200
Committer: Andrii Tkach <at...@hortonworks.com>
Committed: Tue Dec 1 14:14:58 2015 +0200

----------------------------------------------------------------------
 ambari-web/app/routes/main.js | 9 +--------
 1 file changed, 1 insertion(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/730e6f47/ambari-web/app/routes/main.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/routes/main.js b/ambari-web/app/routes/main.js
index 90a57fe..990abb6 100644
--- a/ambari-web/app/routes/main.js
+++ b/ambari-web/app/routes/main.js
@@ -639,14 +639,7 @@ module.exports = Em.Route.extend(App.RouterRedirections, {
             //if service is not existed then route to default service
             if (item.get('isLoaded')) {
               if (router.get('mainServiceItemController.isConfigurable')) {
-                // HDFS service config page requires service metrics information to determine NameNode HA state and hide SNameNode category
-                if (item.get('serviceName') === 'HDFS') {
-                  router.get('mainController').isLoading.call(router.get('clusterController'), 'isServiceContentFullyLoaded').done(function () {
-                    router.get('mainServiceItemController').connectOutlet('mainServiceInfoConfigs', item);
-                  });
-                } else {
-                  router.get('mainServiceItemController').connectOutlet('mainServiceInfoConfigs', item);
-                }
+                router.get('mainServiceItemController').connectOutlet('mainServiceInfoConfigs', item);
               }
               else {
                 // if service doesn't have configs redirect to summary


[10/50] ambari git commit: AMBARI-14105 Oozie/Hive "Database User" field validation missed. (ababiichuk)

Posted by nc...@apache.org.
AMBARI-14105 Oozie/Hive "Database User" field validation missed. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5becb314
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5becb314
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5becb314

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 5becb314e91e79f13911e85dbe82e95b50a19e58
Parents: 01f7d70
Author: aBabiichuk <ab...@cybervisiontech.com>
Authored: Fri Nov 27 17:48:34 2015 +0200
Committer: aBabiichuk <ab...@cybervisiontech.com>
Committed: Fri Nov 27 17:48:34 2015 +0200

----------------------------------------------------------------------
 .../HIVE/0.12.0.2.0/configuration/hive-site.xml              | 1 +
 .../OOZIE/4.0.0.2.0/configuration/oozie-site.xml             | 1 +
 .../OOZIE/4.2.0.2.3/configuration/oozie-site.xml             | 1 +
 .../stacks/HDP/2.1/services/HIVE/configuration/hive-site.xml | 1 +
 .../HDP/2.1/services/OOZIE/configuration/oozie-site.xml      | 1 +
 .../stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml | 1 +
 .../HDPWIN/2.2/services/HIVE/configuration/hive-site.xml     | 1 +
 .../app/models/configs/objects/service_config_property.js    | 8 ++------
 8 files changed, 9 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/5becb314/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-site.xml b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-site.xml
index 29b4796..08cc4d7 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-site.xml
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-site.xml
@@ -69,6 +69,7 @@ limitations under the License.
     <display-name>Database Username</display-name>
     <description>username to use against metastore database</description>
     <value-attributes>
+      <type>db_user</type>
       <overridable>false</overridable>
     </value-attributes>
   </property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5becb314/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/configuration/oozie-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/configuration/oozie-site.xml b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/configuration/oozie-site.xml
index 860a111..57cc8a7 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/configuration/oozie-site.xml
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/configuration/oozie-site.xml
@@ -227,6 +227,7 @@
       Database user name to use to connect to the database
     </description>
     <value-attributes>
+      <type>db_user</type>
       <overridable>false</overridable>
     </value-attributes>
   </property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5becb314/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/configuration/oozie-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/configuration/oozie-site.xml b/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/configuration/oozie-site.xml
index 0e0a79d..b25cfcb 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/configuration/oozie-site.xml
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/configuration/oozie-site.xml
@@ -108,6 +108,7 @@
       Database user name to use to connect to the database
     </description>
     <value-attributes>
+      <type>db_user</type>
       <overridable>false</overridable>
     </value-attributes>
   </property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5becb314/ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/configuration/hive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/configuration/hive-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/configuration/hive-site.xml
index c8ba0a9..aad9ba5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/configuration/hive-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/configuration/hive-site.xml
@@ -67,6 +67,7 @@ limitations under the License.
     <display-name>Database Username</display-name>
     <description>username to use against metastore database</description>
     <value-attributes>
+      <type>db_user</type>
       <overridable>false</overridable>
     </value-attributes>
   </property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5becb314/ambari-server/src/main/resources/stacks/HDP/2.1/services/OOZIE/configuration/oozie-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/OOZIE/configuration/oozie-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.1/services/OOZIE/configuration/oozie-site.xml
index e77cdbb..af1a0a8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/OOZIE/configuration/oozie-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/OOZIE/configuration/oozie-site.xml
@@ -226,6 +226,7 @@
       Database user name to use to connect to the database
     </description>
     <value-attributes>
+      <type>db_user</type>
       <overridable>false</overridable>
     </value-attributes>
   </property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5becb314/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
index 3058fc4..20c04db 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
@@ -436,6 +436,7 @@ limitations under the License.
     <display-name>Database Username</display-name>
     <description>username to use against metastore database</description>
     <value-attributes>
+      <type>db_user</type>
       <overridable>false</overridable>
     </value-attributes>
   </property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5becb314/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/hive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/hive-site.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/hive-site.xml
index be7bc7e..d771748 100644
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/hive-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/hive-site.xml
@@ -404,6 +404,7 @@ limitations under the License.
     <display-name>Database Username</display-name>
     <description>username to use against metastore database</description>
     <value-attributes>
+      <type>db_user</type>
       <overridable>false</overridable>
     </value-attributes>
   </property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5becb314/ambari-web/app/models/configs/objects/service_config_property.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/configs/objects/service_config_property.js b/ambari-web/app/models/configs/objects/service_config_property.js
index 5e23735..fc723c7 100644
--- a/ambari-web/app/models/configs/objects/service_config_property.js
+++ b/ambari-web/app/models/configs/objects/service_config_property.js
@@ -430,14 +430,10 @@ App.ServiceConfigProperty = Em.Object.extend({
           }
           break;
         case 'user':
-          if (!validator.isValidUNIXUser(value)){
-            this.set('errorMessage', 'Username is not valid');
-            isError = true;
-          }
-          break;
         case 'database':
+        case 'db_user':
           if (!validator.isValidDbName(value)){
-            this.set('errorMessage', 'Database is not valid');
+            this.set('errorMessage', 'Value is not valid');
             isError = true;
           }
           break;