You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by nc...@apache.org on 2016/02/03 20:14:15 UTC

[01/16] ambari git commit: AMBARI-14876. Ambari Agent Creating 100,000 Empty Status Command Files (aonishuk)

Repository: ambari
Updated Branches:
  refs/heads/branch-dev-patch-upgrade 4c5d2bd0a -> c72dc41db


AMBARI-14876. Ambari Agent Creating 100,000 Empty Status Command Files (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9cc01b45
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9cc01b45
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9cc01b45

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 9cc01b45ec9d534c73d10c2b0d1e4c7639883d69
Parents: 8ba3d0b
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Feb 2 16:19:57 2016 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Tue Feb 2 16:19:57 2016 +0200

----------------------------------------------------------------------
 .../python/ambari_agent/CustomServiceOrchestrator.py  | 11 ++++++++---
 .../src/main/python/ambari_agent/PythonExecutor.py    | 14 ++++++++------
 .../python/ambari_agent/PythonReflectiveExecutor.py   |  5 +++--
 3 files changed, 19 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9cc01b45/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
index 6c1a161..1bc045c 100644
--- a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
+++ b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
@@ -56,8 +56,11 @@ class CustomServiceOrchestrator():
   IPV4_ADDRESSES_KEY = "all_ipv4_ips"
 
   AMBARI_SERVER_HOST = "ambari_server_host"
-  DONT_DEBUG_FAILURES_FOR_COMMANDS = [COMMAND_NAME_SECURITY_STATUS, COMMAND_NAME_STATUS]
-  REFLECTIVELY_RUN_COMMANDS = [COMMAND_NAME_SECURITY_STATUS, COMMAND_NAME_STATUS] # -- commands which run a lot and often (this increases their speed)
+
+  FREQUENT_COMMANDS = [COMMAND_NAME_SECURITY_STATUS, COMMAND_NAME_STATUS]
+  DONT_DEBUG_FAILURES_FOR_COMMANDS = FREQUENT_COMMANDS
+  REFLECTIVELY_RUN_COMMANDS = FREQUENT_COMMANDS # -- commands which run a lot and often (this increases their speed)
+  DONT_BACKUP_LOGS_FOR_COMMANDS = FREQUENT_COMMANDS
 
   def __init__(self, config, controller):
     self.config = config
@@ -185,13 +188,15 @@ class CustomServiceOrchestrator():
         raise AgentException("Background commands are supported without hooks only")
 
       python_executor = self.get_py_executor(forced_command_name)
+      backup_log_files = not command_name in self.DONT_BACKUP_LOGS_FOR_COMMANDS
       for py_file, current_base_dir in filtered_py_file_list:
         log_info_on_failure = not command_name in self.DONT_DEBUG_FAILURES_FOR_COMMANDS
         script_params = [command_name, json_path, current_base_dir, tmpstrucoutfile, logger_level, self.exec_tmp_dir]
         ret = python_executor.run_file(py_file, script_params,
                                tmpoutfile, tmperrfile, timeout,
                                tmpstrucoutfile, self.map_task_to_process,
-                               task_id, override_output_files, handle = handle, log_info_on_failure=log_info_on_failure)
+                               task_id, override_output_files, backup_log_files = backup_log_files,
+                               handle = handle, log_info_on_failure=log_info_on_failure)
         # Next run_file() invocations should always append to current output
         override_output_files = False
         if ret['exitcode'] != 0:

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cc01b45/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py b/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
index 352974f..0d431bc 100644
--- a/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
+++ b/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
@@ -53,11 +53,12 @@ class PythonExecutor(object):
     pass
 
 
-  def open_subprocess_files(self, tmpoutfile, tmperrfile, override_output_files):
-    if override_output_files: # Recreate files, existing files are backed up
-      self.back_up_log_file_if_exists(tmpoutfile)
+  def open_subprocess_files(self, tmpoutfile, tmperrfile, override_output_files, backup_log_files = True):
+    if override_output_files: # Recreate files, existing files are backed up if backup_log_files is True
+      if backup_log_files:
+        self.back_up_log_file_if_exists(tmpoutfile)
+        self.back_up_log_file_if_exists(tmperrfile)
       tmpout =  open(tmpoutfile, 'w')
-      self.back_up_log_file_if_exists(tmperrfile)
       tmperr =  open(tmperrfile, 'w')
     else: # Append to files
       tmpout =  open(tmpoutfile, 'a')
@@ -78,7 +79,8 @@ class PythonExecutor(object):
 
   def run_file(self, script, script_params, tmpoutfile, tmperrfile,
                timeout, tmpstructedoutfile, callback, task_id,
-               override_output_files = True, handle = None, log_info_on_failure=True):
+               override_output_files = True, backup_log_files = True, handle = None,
+               log_info_on_failure = True):
     """
     Executes the specified python file in a separate subprocess.
     Method returns only when the subprocess is finished.
@@ -94,7 +96,7 @@ class PythonExecutor(object):
     logger.debug("Running command " + pprint.pformat(pythonCommand))
     
     if handle is None:
-      tmpout, tmperr = self.open_subprocess_files(tmpoutfile, tmperrfile, override_output_files)
+      tmpout, tmperr = self.open_subprocess_files(tmpoutfile, tmperrfile, override_output_files, backup_log_files)
 
       process = self.launch_python_subprocess(pythonCommand, tmpout, tmperr)
       # map task_id to pid

http://git-wip-us.apache.org/repos/asf/ambari/blob/9cc01b45/ambari-agent/src/main/python/ambari_agent/PythonReflectiveExecutor.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/PythonReflectiveExecutor.py b/ambari-agent/src/main/python/ambari_agent/PythonReflectiveExecutor.py
index 4a7ed1c..2c42891 100644
--- a/ambari-agent/src/main/python/ambari_agent/PythonReflectiveExecutor.py
+++ b/ambari-agent/src/main/python/ambari_agent/PythonReflectiveExecutor.py
@@ -42,12 +42,13 @@ class PythonReflectiveExecutor(PythonExecutor):
     
   def run_file(self, script, script_params, tmpoutfile, tmperrfile,
                timeout, tmpstructedoutfile, callback, task_id,
-               override_output_files = True, handle = None, log_info_on_failure=True):   
+               override_output_files = True, backup_log_files = True,
+               handle = None, log_info_on_failure=True):
     pythonCommand = self.python_command(script, script_params)
     logger.debug("Running command reflectively " + pprint.pformat(pythonCommand))
     
     script_dir = os.path.dirname(script)
-    self.open_subprocess_files(tmpoutfile, tmperrfile, override_output_files)
+    self.open_subprocess_files(tmpoutfile, tmperrfile, override_output_files, backup_log_files)
     returncode = 1
 
     try:


[02/16] ambari git commit: AMBARI-14880. Add popup that will show desired messages upon login (alexantonenko)

Posted by nc...@apache.org.
AMBARI-14880. Add popup that will show desired messages upon login (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/aab6d889
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/aab6d889
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/aab6d889

Branch: refs/heads/branch-dev-patch-upgrade
Commit: aab6d889895ddf2c02fd6fa2c79ec9901fb9e768
Parents: 9cc01b4
Author: Alex Antonenko <hi...@gmail.com>
Authored: Tue Feb 2 19:14:21 2016 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Tue Feb 2 19:14:33 2016 +0200

----------------------------------------------------------------------
 .../loginActivities/LoginMessageMainCtrl.js     | 53 +++++++++++---
 .../ui/admin-web/app/scripts/i18n.config.js     |  6 +-
 .../resources/ui/admin-web/app/styles/main.css  |  8 ++-
 .../app/views/loginActivities/loginMessage.html | 74 +++++++++++++-------
 ambari-web/app/assets/data/clusters/info.json   | 11 ---
 ambari-web/app/assets/data/settings/motd.json   | 10 +++
 ambari-web/app/messages.js                      |  1 +
 ambari-web/app/router.js                        | 41 +++++++++++
 ambari-web/app/utils/ajax/ajax.js               |  4 ++
 9 files changed, 158 insertions(+), 50 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/aab6d889/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/loginActivities/LoginMessageMainCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/loginActivities/LoginMessageMainCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/loginActivities/LoginMessageMainCtrl.js
index 11fcea5..88c4d27 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/loginActivities/LoginMessageMainCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/loginActivities/LoginMessageMainCtrl.js
@@ -18,17 +18,21 @@
 'use strict';
 
 angular.module('ambariAdminConsole')
-  .controller('LoginMessageMainCtrl',['$scope', 'Alert', '$timeout', '$http', '$translate', function($scope, Alert, $timeout, $http, $translate) {
-    var $t = $translate.instant;
+  .controller('LoginMessageMainCtrl',['$scope', 'Alert', '$timeout', '$location', '$http', '$translate', 'UnsavedDialog', function($scope, Alert, $timeout, $location, $http, $translate, UnsavedDialog) {
+    var $t = $translate.instant,
+      targetUrl = '/loginActivities';
+
     $scope.status = false;
     $scope.motdExists = false;
     $scope.text = "";
+    $scope.buttonText = "Ok";
     $scope.submitDisabled = true;
 
     $http.get('/api/v1/admin-settings/motd').then(function (res) {
-      var respons = JSON.parse(res.data.AdminSettings.content);
-      $scope.text = respons.text ? respons.text : "";
-      $scope.status = respons.status && respons.status == "true" ? true : false;
+      var response = JSON.parse(res.data.AdminSettings.content);
+      $scope.text = response.text ? response.text : "";
+      $scope.buttonText = response.button ? response.button : "";
+      $scope.status = response.status && response.status == "true" ? true : false;
       $scope.motdExists = true;
     });
 
@@ -40,28 +44,57 @@ angular.module('ambariAdminConsole')
       $scope.submitDisabled = false;
     };
 
-    $scope.saveLoginMsg = function(form) {
+    $scope.$watch(function(scope) {
+      return scope.submitDisabled;
+    }, function(submitDisabled) {
+      $scope.form.$dirty = !submitDisabled
+    });
+
+    $scope.saveLoginMsg = function(targetUrl) {
       var method = $scope.motdExists ? 'PUT' : 'POST';
       var data = {
         'AdminSettings' : {
-          'content' : '{"text":"' + $scope.text + '", "status":"' + $scope.status + '"}',
+          'content' : '{"text":"' + $scope.text + '", "button":"' + $scope.buttonText + '", "status":"' + $scope.status + '"}',
           'name' : 'motd',
           'setting_type' : 'ambari-server'
         }
       };
-      form.submitted = true;
-      if (form.$valid){
+      $scope.form.submitted = true;
+      if ($scope.form.$valid){
         $scope.submitDisabled = true;
-        $http({
+        return $http({
           method: method,
           url: '/api/v1/admin-settings/' + ($scope.motdExists ? 'motd' : ''),
           data: data
         }).then(function successCallback() {
           $scope.motdExists = true;
+          targetUrl ? $location.path(targetUrl) : "";
         }, function errorCallback(data) {
           $scope.submitDisabled = false;
           Alert.error($t('common.loginActivities.saveError'), data.data.message);
         });
       }
     };
+
+    $scope.$on('$locationChangeStart', function(event, __targetUrl) {
+      if( $scope.form.$dirty ){
+        UnsavedDialog().then(function(action) {
+          targetUrl = __targetUrl.split('#').pop();
+          switch(action){
+            case 'save':
+              $scope.saveLoginMsg(targetUrl);
+              break;
+            case 'discard':
+              $scope.form.$setPristine();
+              $location.path(targetUrl);
+              break;
+            case 'cancel':
+              targetUrl = '/loginActivities';
+              break;
+          }
+        });
+        event.preventDefault();
+      }
+    });
+
   }]);

http://git-wip-us.apache.org/repos/asf/ambari/blob/aab6d889/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
index c83d627..c1b9d88 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
@@ -85,9 +85,13 @@ angular.module('ambariAdminConsole')
         'loginActivities':'Login Activities',
         'loginMessage': 'Login Message',
         'loginMessage.placeholder': 'Please enter login message',
+        'buttonText.placeholder': 'Please enter text for the "ok" button',
         'homeDirectory': 'Home Directory',
         'onlySimpleChars': 'Must contain only simple characters.',
-        'saveError': 'Save error'
+        'saveError': 'Save error',
+        'message': 'Message',
+        'buttonText': 'Button text',
+        'switch': 'On/Off'
       },
 
       'controls': {

http://git-wip-us.apache.org/repos/asf/ambari/blob/aab6d889/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
index f54d87c..0474e9c 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
@@ -682,11 +682,13 @@ table.no-border tr td{
   min-height: 63px;
 }
 
-.login-message-pane .active, .inactive {font-size:30px;cursor:pointer;float: left;margin-left: 17px;}
+.login-message-pane .active, .inactive {font-size:30px;cursor:pointer;float: left;}
 .login-message-pane i.active {color: #5cb85c;margin-top: 3px;}
 .login-message-pane i.inactive {color: #d9534f;margin-top: 2px;}
-.login-message-pane .well {height: 74px;}
-.login-message-pane input {margin-left: 3px;}
+.login-message-pane .on-off-switch-wrap {height:32px;}
+
+/*.login-message-pane .well {height: 74px;}
+.login-message-pane input {margin-left: 3px;}*/
 
 .views-permissions-panel .panel-body{
   padding-bottom: 0;

http://git-wip-us.apache.org/repos/asf/ambari/blob/aab6d889/ambari-admin/src/main/resources/ui/admin-web/app/views/loginActivities/loginMessage.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/loginActivities/loginMessage.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/loginActivities/loginMessage.html
index 8fb7870..37b6165 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/loginActivities/loginMessage.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/loginActivities/loginMessage.html
@@ -18,44 +18,68 @@
 
 <br/>
 <div class="login-message-pane" ng-controller="LoginMessageMainCtrl">
-
   <form class="form-horizontal" novalidate name="form" autocomplete="off">
     <div class="well">
-      <div class="form-group" ng-class="{'has-error' : (form.login_text.$error.pattern) && form.submitted}">
-        <i class="fa fa-toggle-on active"
-           ng-if="status == true"
-           ng-click="changeStatus();">
-        </i>
-        <i class="fa fa-toggle-on fa-rotate-180 inactive"
-           ng-if="status == false"
-           ng-click="changeStatus();">
-        </i>
-        <div class="col-sm-11">
-          <input type="text"
-                 ng-disabled="!status"
+      <fieldset>
+        <div class="form-group" ng-class="{'has-error' : (form.login_text.$error.pattern) && form.submitted}">
+          <label class="col-sm-2 control-label">{{'common.loginActivities.switch' | translate}}</label>
+          <div class="on-off-switch-wrap col-sm-10">
+            <i class="fa fa-toggle-on active"
+               ng-if="status == true"
+               ng-click="changeStatus();">
+            </i>
+            <i class="fa fa-toggle-on fa-rotate-180 inactive"
+               ng-if="status == false"
+               ng-click="changeStatus();">
+            </i>
+            <input type="checkbox" name="status" class="hidden" ng-model="status">
+          </div>
+        </div>
+        <div class="form-group" ng-class="{'has-error' : (form.login_text.$error.pattern) && form.submitted}">
+          <label class="col-sm-2 control-label">{{'common.loginActivities.message' | translate}}</label>
+          <div class="col-sm-10">
+            <input type="text"
                  class="form-control"
                  name="login_text"
                  placeholder="{{'common.loginActivities.loginMessage.placeholder' | translate}}"
                  ng-model="text"
                  ng-change="inputChangeEvent()"
                  ng-pattern="/^([a-zA-Z0-9._\s]+)$/"
+                 ng-disabled="!status"
                  autocomplete="off">
 
-          <div class="alert alert-danger top-margin" ng-show="form.login_text.$error.pattern && form.submitted">
-            {{'common.loginActivities.onlySimpleChars' | translate}}
+            <div class="alert alert-danger top-margin" ng-show="form.login_text.$error.pattern && form.submitted">
+             {{'common.loginActivities.onlySimpleChars' | translate}}
+            </div>
           </div>
         </div>
+        <div class="form-group" ng-class="{'has-error' : (form.login_text.$error.pattern) && form.submitted}">
+          <label class="col-sm-2 control-label">{{'common.loginActivities.buttonText' | translate}}</label>
+          <div class="col-sm-5">
+            <input type="text"
+                   class="form-control"
+                   name="button_text"
+                   placeholder="{{'common.loginActivities.buttonText.placeholder' | translate}}"
+                   ng-model="buttonText"
+                   ng-change="inputChangeEvent()"
+                   ng-disabled="!status"
+                   ng-pattern="/^([a-zA-Z0-9._\s]+)$/"
+                   autocomplete="off">
+
+            <div class="alert alert-danger top-margin" ng-show="form.button_text.$error.pattern && form.submitted">
+              {{'common.loginActivities.onlySimpleChars' | translate}}
+            </div>
+          </div>
+        </div>
+        <div class="col-sm-offset-2 col-sm-10">
+          <button
+            class="btn btn-primary groupcreate-btn pull-right left-margin"
+            ng-disabled="submitDisabled"
+            ng-click="saveLoginMsg()">
+            {{'common.controls.save' | translate}}
+          </button>
       </div>
-    </div>
-    <div class="form-group">
-      <div class="col-sm-offset-2 col-sm-10">
-        <button
-          class="btn btn-primary groupcreate-btn pull-right left-margin"
-          ng-disabled="submitDisabled"
-          ng-click="saveLoginMsg(form)">
-          {{'common.controls.save' | translate}}
-        </button>
-      </div>
+      </fieldset>
     </div>
   </form>
 </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/aab6d889/ambari-web/app/assets/data/clusters/info.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/clusters/info.json b/ambari-web/app/assets/data/clusters/info.json
deleted file mode 100644
index 072af2f..0000000
--- a/ambari-web/app/assets/data/clusters/info.json
+++ /dev/null
@@ -1,11 +0,0 @@
-{
-  "items" : [
-    {
-      "Clusters" : {
-        "cluster_name" : "tdk",
-        "provisioning_state" : "INSTALLED",
-        "version" : "HDP-2.0.1"
-      }
-    }
-  ]
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/aab6d889/ambari-web/app/assets/data/settings/motd.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/settings/motd.json b/ambari-web/app/assets/data/settings/motd.json
new file mode 100644
index 0000000..c051a90
--- /dev/null
+++ b/ambari-web/app/assets/data/settings/motd.json
@@ -0,0 +1,10 @@
+{
+  "href" : "/api/v1/admin-settings/motd",
+  "AdminSettings" : {
+    "content" : "{\"text\":\"You are using test mode\", \"button\":\"\", \"status\":\"true\"}",
+    "name" : "motd",
+    "setting_type" : "ambari-server",
+    "update_timestamp" : 1454428666251,
+    "updated_by" : "admin"
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/aab6d889/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 2de2f2b..8861b2a 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -424,6 +424,7 @@ Em.I18n.translations = {
   'popup.jdkValidation.body': 'The {0} Stack requires JDK {1} but Ambari is configured for JDK {2}. This could result in error or problems with running your cluster.',
 
   'login.header':'Sign in',
+  'login.message.title':'Login Message',
   'login.username':'Username',
   'login.loginButton':'Sign in',
   'login.error.bad.credentials':'Unable to sign in. Invalid username/password combination.',

http://git-wip-us.apache.org/repos/asf/ambari/blob/aab6d889/ambari-web/app/router.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/router.js b/ambari-web/app/router.js
index 3c6a07c..08df9c5 100644
--- a/ambari-web/app/router.js
+++ b/ambari-web/app/router.js
@@ -317,6 +317,13 @@ App.Router = Em.Router.extend({
       loginData: data
     };
     App.router.get('clusterController').loadAuthorizations().complete(function() {
+      App.ajax.send({
+        name: 'router.login.message',
+        sender: self,
+        success: 'showLoginMessage'
+
+    });
+
       // no need to load cluster data if it's already loaded
       if (self.get('clusterData')) {
         self.loginGetClustersSuccessCallback(self.get('clusterData'), {}, requestData);
@@ -352,6 +359,40 @@ App.Router = Em.Router.extend({
   },
 
   /**
+   * success callback of router.login.message
+   * @param {object} data
+   */
+  showLoginMessage: function (data){
+    var response = JSON.parse(data.AdminSettings.content),
+      text = response.text ? response.text : "",
+      buttonText = response.button ? response.button : Em.I18n.t('ok'),
+      status = response.status && response.status == "true" ? true : false;
+
+    if(text && status){
+      return App.ModalPopup.show({
+        classNames: ['sixty-percent-width-modal'],
+        header: Em.I18n.t('login.message.title'),
+        bodyClass: Ember.View.extend({
+          template: Ember.Handlebars.compile(text)
+        }),
+        primary: buttonText,
+        secondary: null,
+
+        onPrimary: function () {
+          this.hide();
+        },
+        onClose: function () {
+          this.hide();
+        },
+        didInsertElement: function () {
+          this.fitHeight();
+        }
+      });
+    }
+  },
+
+
+  /**
    * success callback of login request
    * @param {object} clustersData
    * @param {object} opt

http://git-wip-us.apache.org/repos/asf/ambari/blob/aab6d889/ambari-web/app/utils/ajax/ajax.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/ajax/ajax.js b/ambari-web/app/utils/ajax/ajax.js
index 4872f50..9cd54d2 100644
--- a/ambari-web/app/utils/ajax/ajax.js
+++ b/ambari-web/app/utils/ajax/ajax.js
@@ -2088,6 +2088,10 @@ var urls = {
     'real': '/clusters?fields=Clusters/provisioning_state',
     'mock': '/data/clusters/info.json'
   },
+  'router.login.message': {
+    'real': '/admin-settings/motd',
+    'mock': '/data/settings/motd.json'
+  },
   'router.logoff': {
     'real': '/logout',
     'mock': '',


[07/16] ambari git commit: AMBARI-14877. [Ambari tarballs] Refactor postinstall and postremove scripts to use install-helper.sh exclusively (aonishuk)

Posted by nc...@apache.org.
AMBARI-14877. [Ambari tarballs] Refactor postinstall and postremove scripts to use install-helper.sh exclusively (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f6a75a60
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f6a75a60
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f6a75a60

Branch: refs/heads/branch-dev-patch-upgrade
Commit: f6a75a609caa1ce24c6cf9ab1f6303eb5bc9e923
Parents: 58b91c8
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Wed Feb 3 11:52:49 2016 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Wed Feb 3 11:52:49 2016 +0200

----------------------------------------------------------------------
 ambari-agent/conf/unix/install-helper.sh        | 40 +++++++++++++++-
 .../src/main/package/deb/control/postinst       | 12 +----
 .../src/main/package/deb/control/posttrm        | 15 ------
 ambari-agent/src/main/package/deb/control/prerm |  8 ----
 .../src/main/package/rpm/postinstall.sh         | 19 +-------
 ambari-agent/src/main/package/rpm/preremove.sh  |  8 ----
 ambari-agent/src/packages/tarball/all.xml       |  4 +-
 ambari-server/conf/unix/install-helper.sh       | 48 ++++++++++++++++----
 ambari-server/src/main/assemblies/server.xml    |  2 +-
 .../src/main/package/deb/control/postinst       | 10 +---
 .../src/main/package/deb/control/posttrm        | 15 ------
 .../src/main/package/deb/control/prerm          | 18 +-------
 .../src/main/package/rpm/postinstall.sh         |  8 +---
 ambari-server/src/main/package/rpm/preremove.sh | 18 ++------
 14 files changed, 92 insertions(+), 133 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f6a75a60/ambari-agent/conf/unix/install-helper.sh
----------------------------------------------------------------------
diff --git a/ambari-agent/conf/unix/install-helper.sh b/ambari-agent/conf/unix/install-helper.sh
index 185290c..91ae248 100644
--- a/ambari-agent/conf/unix/install-helper.sh
+++ b/ambari-agent/conf/unix/install-helper.sh
@@ -33,6 +33,11 @@ PYTHON_WRAPER_TARGET="/usr/bin/ambari-python-wrap"
 PYTHON_WRAPER_SOURCE="/var/lib/ambari-agent/ambari-python-wrap"
 
 do_install(){
+  if [ -d "/etc/ambari-agent/conf.save" ]; then
+    cp -f /etc/ambari-agent/conf.save/* /etc/ambari-agent/conf
+    mv /etc/ambari-agent/conf.save /etc/ambari-agent/conf_$(date '+%d_%m_%y_%H_%M').save
+  fi
+    
   # setting ambari_commons shared resource
   rm -rf "$OLD_COMMON_DIR"
   if [ ! -d "$COMMON_DIR" ]; then
@@ -61,10 +66,34 @@ do_install(){
   
   chmod 777 /var/lib/ambari-agent/tmp
   chmod 700 /var/lib/ambari-agent/data
+
+  which chkconfig > /dev/null 2>&1
+  if [ "$?" -eq 0 ] ; then
+    chkconfig --add ambari-agent
+  fi
+  which update-rc.d > /dev/null 2>&1
+  if [ "$?" -eq 0 ] ; then
+    update-rc.d ambari-agent defaults
+  fi
+
+  BAK=/etc/ambari-agent/conf/ambari-agent.ini.old
+  ORIG=/etc/ambari-agent/conf/ambari-agent.ini
+
+  if [ -f $BAK ]; then
+    if [ -f "/var/lib/ambari-agent/upgrade_agent_configs.py" ]; then
+      /var/lib/ambari-agent/upgrade_agent_configs.py
+    fi
+    mv $BAK ${BAK}_$(date '+%d_%m_%y_%H_%M').save
+  fi
 }
 
 do_remove(){
-
+  /usr/sbin/ambari-agent stop > /dev/null 2>&1
+  if [ -d "/etc/ambari-agent/conf.save" ]; then
+    mv /etc/ambari-agent/conf.save /etc/ambari-agent/conf_$(date '+%d_%m_%y_%H_%M').save
+  fi
+  mv /etc/ambari-agent/conf /etc/ambari-agent/conf.save
+    
   if [ -f "$PYTHON_WRAPER_TARGET" ]; then
     rm -f "$PYTHON_WRAPER_TARGET"
   fi
@@ -93,6 +122,15 @@ do_remove(){
   if [ -f "$INSTALL_HELPER_SERVER" ]; then  #  call server shared files installer
     $INSTALL_HELPER_SERVER install
   fi
+
+  which chkconfig > /dev/null 2>&1
+  if [ "$?" -eq 0 ] ; then
+    chkconfig --list | grep ambari-server && chkconfig --del ambari-agent
+  fi
+  which update-rc.d > /dev/null 2>&1
+  if [ "$?" -eq 0 ] ; then
+    update-rc.d -f ambari-agent remove
+  fi
 }
 
 do_upgrade(){

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6a75a60/ambari-agent/src/main/package/deb/control/postinst
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/package/deb/control/postinst b/ambari-agent/src/main/package/deb/control/postinst
index ceca9be..48cb07c 100644
--- a/ambari-agent/src/main/package/deb/control/postinst
+++ b/ambari-agent/src/main/package/deb/control/postinst
@@ -14,20 +14,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License
 
+# Warning: don't add changes to this script directly, please add changes to install-helper.sh.
+
 if [ "$1" == "configure" ]; then  # Action is install
   if [ -f "/var/lib/ambari-agent/install-helper.sh" ]; then
     /var/lib/ambari-agent/install-helper.sh install
   fi
-  update-rc.d ambari-agent defaults
 fi
 
-BAK=/etc/ambari-agent/conf/ambari-agent.ini.old
-ORIG=/etc/ambari-agent/conf/ambari-agent.ini
-
-if [ -f $BAK ];then
-  if [ -f "/var/lib/ambari-agent/upgrade_agent_configs.py" ]; then
-    /var/lib/ambari-agent/upgrade_agent_configs.py
-  fi
-  mv $BAK ${BAK}_$(date '+%d_%m_%y_%H_%M').save
-fi
 exit 0

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6a75a60/ambari-agent/src/main/package/deb/control/posttrm
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/package/deb/control/posttrm b/ambari-agent/src/main/package/deb/control/posttrm
deleted file mode 100644
index 21a01fa..0000000
--- a/ambari-agent/src/main/package/deb/control/posttrm
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6a75a60/ambari-agent/src/main/package/deb/control/prerm
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/package/deb/control/prerm b/ambari-agent/src/main/package/deb/control/prerm
index 70a9c63..8da949c 100644
--- a/ambari-agent/src/main/package/deb/control/prerm
+++ b/ambari-agent/src/main/package/deb/control/prerm
@@ -19,17 +19,9 @@
 # for details
 
 if [ "$1" == "remove" ]; then # Action is uninstall
-    /usr/sbin/ambari-agent stop > /dev/null 2>&1
-    if [ -d "/etc/ambari-agent/conf.save" ];  then
-        mv /etc/ambari-agent/conf.save /etc/ambari-agent/conf_$(date '+%d_%m_%y_%H_%M').save
-    fi
-    mv /etc/ambari-agent/conf /etc/ambari-agent/conf.save
-
     if [ -f "/var/lib/ambari-agent/install-helper.sh" ]; then
       /var/lib/ambari-agent/install-helper.sh remove
     fi
-
-    update-rc.d -f ambari-agent remove
 fi
 
 exit 0

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6a75a60/ambari-agent/src/main/package/rpm/postinstall.sh
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/package/rpm/postinstall.sh b/ambari-agent/src/main/package/rpm/postinstall.sh
index 56c6573..17e6e0e 100644
--- a/ambari-agent/src/main/package/rpm/postinstall.sh
+++ b/ambari-agent/src/main/package/rpm/postinstall.sh
@@ -13,36 +13,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License
 
+# Warning: don't add changes to this script directly, please add changes to install-helper.sh.
 
 case "$1" in
   1) # Action install
     if [ -f "/var/lib/ambari-agent/install-helper.sh" ]; then
         /var/lib/ambari-agent/install-helper.sh install
     fi
-  chkconfig --add ambari-agent
   ;;
   2) # Action upgrade
-    if [ -d "/etc/ambari-agent/conf.save" ]; then
-        cp -f /etc/ambari-agent/conf.save/* /etc/ambari-agent/conf
-        mv /etc/ambari-agent/conf.save /etc/ambari-agent/conf_$(date '+%d_%m_%y_%H_%M').save
-    fi
-
     if [ -f "/var/lib/ambari-agent/install-helper.sh" ]; then
         /var/lib/ambari-agent/install-helper.sh upgrade
     fi
   ;;
 esac
 
-
-BAK=/etc/ambari-agent/conf/ambari-agent.ini.old
-ORIG=/etc/ambari-agent/conf/ambari-agent.ini
-
-if [ -f $BAK ]; then
-  if [ -f "/var/lib/ambari-agent/upgrade_agent_configs.py" ]; then
-    /var/lib/ambari-agent/upgrade_agent_configs.py
-  fi
-  mv $BAK ${BAK}_$(date '+%d_%m_%y_%H_%M').save
-fi
-
-
 exit 0

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6a75a60/ambari-agent/src/main/package/rpm/preremove.sh
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/package/rpm/preremove.sh b/ambari-agent/src/main/package/rpm/preremove.sh
index 2078acd..e266114 100644
--- a/ambari-agent/src/main/package/rpm/preremove.sh
+++ b/ambari-agent/src/main/package/rpm/preremove.sh
@@ -19,17 +19,9 @@
 
 
 if [ "$1" -eq 0 ]; then  # Action is uninstall
-    /usr/sbin/ambari-agent stop > /dev/null 2>&1
-    if [ -d "/etc/ambari-agent/conf.save" ]; then
-        mv /etc/ambari-agent/conf.save /etc/ambari-agent/conf_$(date '+%d_%m_%y_%H_%M').save
-    fi
-    mv /etc/ambari-agent/conf /etc/ambari-agent/conf.save
-
     if [ -f "/var/lib/ambari-agent/install-helper.sh" ]; then
       /var/lib/ambari-agent/install-helper.sh remove
     fi
-
-    chkconfig --list | grep ambari-server && chkconfig --del ambari-server
 fi
 
 exit 0

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6a75a60/ambari-agent/src/packages/tarball/all.xml
----------------------------------------------------------------------
diff --git a/ambari-agent/src/packages/tarball/all.xml b/ambari-agent/src/packages/tarball/all.xml
index d4ed2ff..d9732cc 100644
--- a/ambari-agent/src/packages/tarball/all.xml
+++ b/ambari-agent/src/packages/tarball/all.xml
@@ -161,7 +161,7 @@
     </file>
     <file>
       <fileMode>755</fileMode>
-      <source>conf/unix/ambari-agent</source>
+      <source>${basedir}/target/src/ambari-agent</source>
       <outputDirectory>/usr/sbin</outputDirectory>
     </file>
     <file>
@@ -187,7 +187,7 @@
     <file>
       <fileMode>755</fileMode>
       <source>etc/init.d/ambari-agent</source>
-      <outputDirectory>/etc/init.d/ambari-agent</outputDirectory>
+      <outputDirectory>/etc/init.d</outputDirectory>
     </file>
     <file>
       <fileMode>644</fileMode>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6a75a60/ambari-server/conf/unix/install-helper.sh
----------------------------------------------------------------------
diff --git a/ambari-server/conf/unix/install-helper.sh b/ambari-server/conf/unix/install-helper.sh
index 2635694..c7d05b4 100644
--- a/ambari-server/conf/unix/install-helper.sh
+++ b/ambari-server/conf/unix/install-helper.sh
@@ -33,6 +33,9 @@ PYTHON_WRAPER_TARGET="/usr/bin/ambari-python-wrap"
 PYTHON_WRAPER_SOURCE="/var/lib/ambari-server/ambari-python-wrap"
 
 do_install(){
+  rm -f /usr/sbin/ambari-server
+  ln -s /etc/init.d/ambari-server /usr/sbin/ambari-server
+ 
   # setting ambari_commons shared resource
   rm -rf "$OLD_COMMON_DIR"
   if [ ! -d "$COMMON_DIR" ]; then
@@ -54,10 +57,27 @@ do_install(){
   if [ ! -f "$PYTHON_WRAPER_TARGET" ]; then
     ln -s "$PYTHON_WRAPER_SOURCE" "$PYTHON_WRAPER_TARGET"
   fi
+
+  which chkconfig > /dev/null 2>&1
+  if [ "$?" -eq 0 ] ; then
+    chkconfig --add ambari-server
+  fi
+  which update-rc.d > /dev/null 2>&1
+  if [ "$?" -eq 0 ] ; then
+    update-rc.d ambari-server defaults
+  fi
 }
 
 do_remove(){
+  /usr/sbin/ambari-server stop > /dev/null 2>&1
+  if [ -d "/etc/ambari-server/conf.save" ]; then
+      mv /etc/ambari-server/conf.save /etc/ambari-server/conf_$(date '+%d_%m_%y_%H_%M').save
+  fi
+  # Remove link created during install
+  rm -f /usr/sbin/ambari-server
 
+  mv /etc/ambari-server/conf /etc/ambari-server/conf.save
+    
   if [ -f "$PYTHON_WRAPER_TARGET" ]; then
     rm -f "$PYTHON_WRAPER_TARGET"
   fi
@@ -90,20 +110,30 @@ do_remove(){
   if [ -f "$INSTALL_HELPER_AGENT" ]; then  #  call agent shared files installer
     $INSTALL_HELPER_AGENT install
   fi
+
+  which chkconfig > /dev/null 2>&1
+  if [ "$?" -eq 0 ] ; then
+    chkconfig --list | grep ambari-server && chkconfig --del ambari-server
+  fi
+  which update-rc.d > /dev/null 2>&1
+  if [ "$?" -eq 0 ] ; then
+    update-rc.d -f ambari-server remove
+  fi
 }
 
 do_upgrade(){
+  # this function only gets called for rpm. Deb packages always call do_install directly.
   do_install
 }
 
 case "$1" in
-install)
-  do_install
-  ;;
-remove)
-  do_remove
-  ;;
-upgrade)
-  do_upgrade
-  ;;
+	install)
+	  do_install
+	  ;;
+	remove)
+	  do_remove
+	  ;;
+	upgrade)
+	  do_upgrade
+	  ;;
 esac

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6a75a60/ambari-server/src/main/assemblies/server.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/assemblies/server.xml b/ambari-server/src/main/assemblies/server.xml
index c24bce6..17d6bdb 100644
--- a/ambari-server/src/main/assemblies/server.xml
+++ b/ambari-server/src/main/assemblies/server.xml
@@ -200,7 +200,7 @@
     </file>
     <file>
       <fileMode>755</fileMode>
-      <source>sbin/ambari-server</source>
+      <source>${basedir}/target/ambari-server</source>
       <outputDirectory>/etc/init.d</outputDirectory>
     </file>
     <file>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6a75a60/ambari-server/src/main/package/deb/control/postinst
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/package/deb/control/postinst b/ambari-server/src/main/package/deb/control/postinst
index c3b6235..e8d12c3 100644
--- a/ambari-server/src/main/package/deb/control/postinst
+++ b/ambari-server/src/main/package/deb/control/postinst
@@ -14,18 +14,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License
 
-if [ -e "/usr/sbin/ambari-server" ]; then # Check is needed for upgrade
-    # Remove link created by previous package version
-    rm -f /usr/sbin/ambari-server
-fi
-
-ln -s /etc/init.d/ambari-server /usr/sbin/ambari-server
+# Warning: don't add changes to this script directly, please add changes to install-helper.sh.
 
-if [ "$1" == "configure" ]; then  # Action is install
+if [ "$1" == "configure" ] ; then
   if [ -f "/var/lib/ambari-server/install-helper.sh" ]; then
       /var/lib/ambari-server/install-helper.sh install
   fi
-  update-rc.d ambari-server defaults
 fi
 
 exit 0

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6a75a60/ambari-server/src/main/package/deb/control/posttrm
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/package/deb/control/posttrm b/ambari-server/src/main/package/deb/control/posttrm
deleted file mode 100644
index 21a01fa..0000000
--- a/ambari-server/src/main/package/deb/control/posttrm
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6a75a60/ambari-server/src/main/package/deb/control/prerm
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/package/deb/control/prerm b/ambari-server/src/main/package/deb/control/prerm
index 927ba4c..98e3be5 100644
--- a/ambari-server/src/main/package/deb/control/prerm
+++ b/ambari-server/src/main/package/deb/control/prerm
@@ -14,26 +14,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License
 
+# Warning: don't add changes to this script directly, please add changes to install-helper.sh.
 
-
-if [ "$1" == "remove" ]; then # Action is uninstall
-    /usr/sbin/ambari-server stop > /dev/null 2>&1
-    if [ -d "/etc/ambari-server/conf.save" ]; then
-        mv /etc/ambari-server/conf.save /etc/ambari-server/conf_$(date '+%d_%m_%y_%H_%M').save
-    fi
-
-    if [ -e "/usr/sbin/ambari-server" ]; then
-        # Remove link created during install
-        rm /usr/sbin/ambari-server
-    fi
-
-    mv /etc/ambari-server/conf /etc/ambari-server/conf.save
-
+if [ "$1" == "remove" ] ; then # Action is uninstall
     if [ -f "/var/lib/ambari-server/install-helper.sh" ]; then
       /var/lib/ambari-server/install-helper.sh remove
     fi
-
-    update-rc.d -f ambari-server remove
 fi
 
 exit 0

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6a75a60/ambari-server/src/main/package/rpm/postinstall.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/package/rpm/postinstall.sh b/ambari-server/src/main/package/rpm/postinstall.sh
index 9b0b54a..5ca1d17 100644
--- a/ambari-server/src/main/package/rpm/postinstall.sh
+++ b/ambari-server/src/main/package/rpm/postinstall.sh
@@ -13,19 +13,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License
 
-if [ -e "/usr/sbin/ambari-server" ]; then # Check is needed for upgrade
-    # Remove link created by previous package version
-    rm -f /usr/sbin/ambari-server
-fi
-
-ln -s /etc/init.d/ambari-server /usr/sbin/ambari-server
+# Warning: don't add changes to this script directly, please add changes to install-helper.sh.
 
 case "$1" in
   1) # Action install
     if [ -f "/var/lib/ambari-server/install-helper.sh" ]; then
         /var/lib/ambari-server/install-helper.sh install
     fi
-    chkconfig --add ambari-server
   ;;
   2) # Action upgrade
     if [ -f "/var/lib/ambari-server/install-helper.sh" ]; then

http://git-wip-us.apache.org/repos/asf/ambari/blob/f6a75a60/ambari-server/src/main/package/rpm/preremove.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/package/rpm/preremove.sh b/ambari-server/src/main/package/rpm/preremove.sh
index 6febaa8..445e9ad 100644
--- a/ambari-server/src/main/package/rpm/preremove.sh
+++ b/ambari-server/src/main/package/rpm/preremove.sh
@@ -17,24 +17,12 @@
 # during package update. See http://www.ibm.com/developerworks/library/l-rpm2/
 # for details
 
-if [ "$1" -eq 0 ]; then  # Action is uninstall
-    /usr/sbin/ambari-server stop > /dev/null 2>&1
-    if [ -d "/etc/ambari-server/conf.save" ]; then
-        mv /etc/ambari-server/conf.save /etc/ambari-server/conf_$(date '+%d_%m_%y_%H_%M').save
-    fi
-
-    if [ -e "/usr/sbin/ambari-server" ]; then
-        # Remove link created during install
-        rm /usr/sbin/ambari-server
-    fi
-
-    mv /etc/ambari-server/conf /etc/ambari-server/conf.save
+# Warning: don't add changes to this script directly, please add changes to install-helper.sh.
 
+if [ "$1" -eq 0 ]; then  # Action is uninstall
     if [ -f "/var/lib/ambari-server/install-helper.sh" ]; then
       /var/lib/ambari-server/install-helper.sh remove
     fi
-
-    chkconfig --list | grep ambari-server && chkconfig --del ambari-server
 fi
 
-exit 0
+exit 0
\ No newline at end of file


[15/16] ambari git commit: AMBARI-14855: Add Alert for HAWQSTANDBY sync status with HAWQMASTER (mithmatt via jaoki)

Posted by nc...@apache.org.
AMBARI-14855: Add Alert for HAWQSTANDBY sync status with HAWQMASTER (mithmatt via jaoki)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/424cca6c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/424cca6c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/424cca6c

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 424cca6c3744eca2c44dff0fbc47791492845f88
Parents: 7259d97
Author: Jun Aoki <ja...@apache.org>
Authored: Wed Feb 3 10:58:56 2016 -0800
Committer: Jun Aoki <ja...@apache.org>
Committed: Wed Feb 3 10:58:56 2016 -0800

----------------------------------------------------------------------
 .../common-services/HAWQ/2.0.0/alerts.json      |  19 ++
 .../2.0.0/package/alerts/alert_sync_status.py   |  91 +++++++++
 .../stacks/2.3/HAWQ/test_alert_sync_status.py   | 194 +++++++++++++++++++
 3 files changed, 304 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/424cca6c/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/alerts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/alerts.json b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/alerts.json
new file mode 100644
index 0000000..3119a0c
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/alerts.json
@@ -0,0 +1,19 @@
+{
+  "HAWQ": {
+    "HAWQMASTER": [
+      {
+        "name": "hawqstandby_sync_status",
+        "label": "HAWQ Standby Sync Status",
+        "description": "This alert will trigger if HAWQ Standby is not synchronized with HAWQ Master",
+        "interval": 1,
+        "scope": "ANY",
+        "enabled": true,
+        "source": {
+          "type": "SCRIPT",
+          "path": "HAWQ/2.0.0/package/alerts/alert_sync_status.py",
+          "parameters": []
+        }
+      }
+    ]
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/424cca6c/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/alerts/alert_sync_status.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/alerts/alert_sync_status.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/alerts/alert_sync_status.py
new file mode 100644
index 0000000..c94be9e
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/alerts/alert_sync_status.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import logging
+from resource_management.core.shell import call
+
+HAWQ_USER = 'gpadmin'
+HAWQ_GREENPLUM_PATH_FILE = '/usr/local/hawq/greenplum_path.sh'
+
+HAWQMASTER_PORT = '{{hawq-site/hawq_master_address_port}}'
+HAWQSTANDBY_ADDRESS = '{{hawq-site/hawq_standby_address_host}}'
+
+RESULT_STATE_OK = 'OK'
+RESULT_STATE_WARNING = 'WARNING'
+RESULT_STATE_UNKNOWN = 'UNKNOWN'
+RESULT_STATE_SKIPPED = 'SKIPPED'
+
+logger = logging.getLogger('ambari_alerts')
+
+
+def get_tokens():
+  """
+  Returns a tuple of tokens in the format {{site/property}} that will be used to build the dictionary passed into execute
+  """
+  return (HAWQMASTER_PORT, HAWQSTANDBY_ADDRESS)
+  
+
+def execute(configurations={}, parameters={}, host_name=None):
+  """
+  Returns a tuple containing the result code and a pre-formatted result label
+
+  Keyword arguments:
+  configurations (dictionary): a mapping of configuration key to value
+  parameters (dictionary): a mapping of script parameter key to value
+  host_name (string): the name of this host where the alert is running
+  """
+
+  if configurations is None:
+    return (RESULT_STATE_UNKNOWN, ['There were no configurations supplied to the script.'])
+
+  # If HAWQSTANDBY is not installed on the cluster
+  if HAWQSTANDBY_ADDRESS not in configurations:
+   return (RESULT_STATE_SKIPPED, ['HAWQSTANDBY is not installed.'])
+
+  try:
+    sync_status = get_sync_status(configurations[HAWQMASTER_PORT])
+    if sync_status in ('Synchronized', 'Synchronizing'):
+      return (RESULT_STATE_OK, ['HAWQSTANDBY is in sync with HAWQMASTER.'])
+    elif sync_status == 'Not Synchronized':
+      return (RESULT_STATE_WARNING, ['HAWQSTANDBY is not in sync with HAWQMASTER.'])
+  except Exception, e:
+    logger.exception('[Alert] Retrieving HAWQSTANDBY sync status from HAWQMASTER fails on host, {0}:'.format(host_name))
+    logger.exception(str(e))
+
+  # Sync status cannot be determined
+  return (RESULT_STATE_UNKNOWN, ['Sync status cannot be determined.'])
+
+
+def get_sync_status(port):
+  """
+  Gets the sync status of HAWQSTANDBY from HAWQMASTER by running a SQL command.
+  summary_state can be of the following values: ('Synchronized', 'Synchronizing', 'Not Synchronized', 'None', 'Not Configured', 'Unknown')
+  """
+  query = "SELECT summary_state FROM gp_master_mirroring"
+  cmd = "source {0} && psql -p {1} -t -d template1 -c \"{2};\"".format(HAWQ_GREENPLUM_PATH_FILE, port, query)
+
+  returncode, output = call(cmd,
+                            user=HAWQ_USER,
+                            timeout=60)
+
+  if returncode:
+    raise
+
+  return output.strip()

http://git-wip-us.apache.org/repos/asf/ambari/blob/424cca6c/ambari-server/src/test/python/stacks/2.3/HAWQ/test_alert_sync_status.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/HAWQ/test_alert_sync_status.py b/ambari-server/src/test/python/stacks/2.3/HAWQ/test_alert_sync_status.py
new file mode 100644
index 0000000..7d030dc
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.3/HAWQ/test_alert_sync_status.py
@@ -0,0 +1,194 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+# System imports
+import os
+import sys
+
+from mock.mock import patch
+
+# Local imports
+from stacks.utils.RMFTestCase import *
+
+COMMON_SERVICES_ALERTS_DIR = "HAWQ/2.0.0/package/alerts"
+
+file_path = os.path.dirname(os.path.abspath(__file__))
+file_path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(file_path)))))
+file_path = os.path.join(file_path, "main", "resources", "common-services", COMMON_SERVICES_ALERTS_DIR)
+
+RESULT_STATE_OK = 'OK'
+RESULT_STATE_WARNING = 'WARNING'
+RESULT_STATE_UNKNOWN = 'UNKNOWN'
+RESULT_STATE_SKIPPED = 'SKIPPED'
+
+class TestAlertSyncStatus(RMFTestCase):
+
+  def setUp(self):
+    """
+    Import the class under test.
+    Because the class is present in a different folder, append its dir to the system path.
+    Also, shorten the import name and make it a global so the test functions can access it.
+    :return:
+    """
+    sys.path.append(file_path)
+    global alert_sync_status
+    import alert_sync_status
+  
+  def test_missing_configs(self):
+    """
+    Check that the status is UNKNOWN when configs are missing.
+    """
+    configs = None
+    [status, messages] = alert_sync_status.execute(configurations=configs)
+    self.assertEqual(status, RESULT_STATE_UNKNOWN)
+    self.assertTrue(messages is not None and len(messages) == 1)
+    self.assertEqual(messages[0], 'There were no configurations supplied to the script.')
+
+
+  @patch("alert_sync_status.get_sync_status")
+  def test_no_standby_state(self, get_sync_status_mock):
+    """
+    Test that the status is SKIPPED when HAWQSTANDBY is not in configurations
+    """
+    configs = {
+      "{{hawq-site/hawq_master_address_port}}": "5432"
+    }
+
+    # Mock calls
+    get_sync_status_mock.return_value = 'Not Configured'
+
+    [status, messages] = alert_sync_status.execute(configurations=configs)
+    self.assertEqual(status, RESULT_STATE_SKIPPED)
+    self.assertTrue(messages is not None and len(messages) == 1)
+    self.assertEqual(messages[0], 'HAWQSTANDBY is not installed.')
+
+
+  @patch("alert_sync_status.get_sync_status")
+  def test_synchronized_state(self, get_sync_status_mock):
+    """
+    Test that the status is OK when HAWQSTANDBY is 'Synchronized' with HAWQMASTER
+    """
+    configs = {
+      "{{hawq-site/hawq_master_address_port}}": "5432",
+      "{{hawq-site/hawq_standby_address_host}}": "c6402.ambari.apache.org"
+    }
+
+    # Mock calls
+    get_sync_status_mock.return_value = 'Synchronized'
+
+    [status, messages] = alert_sync_status.execute(configurations=configs)
+    self.assertEqual(status, RESULT_STATE_OK)
+    self.assertTrue(messages is not None and len(messages) == 1)
+    self.assertEqual(messages[0], 'HAWQSTANDBY is in sync with HAWQMASTER.')
+
+
+  @patch("alert_sync_status.get_sync_status")
+  def test_synchronizing_state(self, get_sync_status_mock):
+    """
+    Test that the status is OK when HAWQSTANDBY is 'Synchronizing' with HAWQMASTER
+    """
+    configs = {
+      "{{hawq-site/hawq_master_address_port}}": "5432",
+      "{{hawq-site/hawq_standby_address_host}}": "c6402.ambari.apache.org"
+    }
+
+    # Mock calls
+    get_sync_status_mock.return_value = 'Synchronizing'
+
+    [status, messages] = alert_sync_status.execute(configurations=configs)
+    self.assertEqual(status, RESULT_STATE_OK)
+    self.assertTrue(messages is not None and len(messages) == 1)
+    self.assertEqual(messages[0], 'HAWQSTANDBY is in sync with HAWQMASTER.')
+
+
+  @patch("alert_sync_status.get_sync_status")
+  def test_not_synchronized_state(self, get_sync_status_mock):
+    """
+    Test that the status is WARNING when HAWQSTANDBY is 'Not Synchronized' with HAWQMASTER
+    """
+    configs = {
+      "{{hawq-site/hawq_master_address_port}}": "5432",
+      "{{hawq-site/hawq_standby_address_host}}": "c6402.ambari.apache.org"
+    }
+
+    # Mock calls
+    get_sync_status_mock.return_value = 'Not Synchronized'
+
+    [status, messages] = alert_sync_status.execute(configurations=configs)
+    self.assertEqual(status, RESULT_STATE_WARNING)
+    self.assertTrue(messages is not None and len(messages) == 1)
+    self.assertEqual(messages[0], 'HAWQSTANDBY is not in sync with HAWQMASTER.')
+
+
+  @patch("alert_sync_status.get_sync_status")
+  def test_none_state(self, get_sync_status_mock):
+    """
+    Test that the status is UNKNOWN when HAWQMASTER returns summary_state as 'None'
+    """
+    configs = {
+      "{{hawq-site/hawq_master_address_port}}": "5432",
+      "{{hawq-site/hawq_standby_address_host}}": "c6402.ambari.apache.org"
+    }
+
+    # Mock calls
+    get_sync_status_mock.return_value = 'None'
+
+    [status, messages] = alert_sync_status.execute(configurations=configs)
+    self.assertEqual(status, RESULT_STATE_UNKNOWN)
+    self.assertTrue(messages is not None and len(messages) == 1)
+    self.assertEqual(messages[0], 'Sync status cannot be determined.')
+
+
+  @patch("alert_sync_status.get_sync_status")
+  def test_not_configured_state(self, get_sync_status_mock):
+    """
+    Test that the status is UNKNOWN when HAWQMASTER returns summary_state as 'Not Configured'
+    """
+    configs = {
+      "{{hawq-site/hawq_master_address_port}}": "5432",
+      "{{hawq-site/hawq_standby_address_host}}": "c6402.ambari.apache.org"
+    }
+
+    # Mock calls
+    get_sync_status_mock.return_value = 'Not Configured'
+
+    [status, messages] = alert_sync_status.execute(configurations=configs)
+    self.assertEqual(status, RESULT_STATE_UNKNOWN)
+    self.assertTrue(messages is not None and len(messages) == 1)
+    self.assertEqual(messages[0], 'Sync status cannot be determined.')
+
+
+  @patch("alert_sync_status.get_sync_status")
+  def test_unknown_state(self, get_sync_status_mock):
+    """
+    Test that the status is UNKNOWN when HAWQMASTER returns summary_state as 'Unknown'
+    """
+    configs = {
+      "{{hawq-site/hawq_master_address_port}}": "5432",
+      "{{hawq-site/hawq_standby_address_host}}": "c6402.ambari.apache.org"
+    }
+
+    # Mock calls
+    get_sync_status_mock.return_value = 'Unknown'
+
+    [status, messages] = alert_sync_status.execute(configurations=configs)
+    self.assertEqual(status, RESULT_STATE_UNKNOWN)
+    self.assertTrue(messages is not None and len(messages) == 1)
+    self.assertEqual(messages[0], 'Sync status cannot be determined.')


[13/16] ambari git commit: AMBARI-14898. Alerts: Ability to customize props and thresholds on SCRIPT alerts via Ambari Web UI (onechiporenko)

Posted by nc...@apache.org.
AMBARI-14898. Alerts: Ability to customize props and thresholds on SCRIPT alerts via Ambari Web UI (onechiporenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6d9e0599
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6d9e0599
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6d9e0599

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 6d9e05995f6815a600021e0e84f3f29518989b36
Parents: 46f6030
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Wed Feb 3 16:02:23 2016 +0200
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Wed Feb 3 18:12:10 2016 +0200

----------------------------------------------------------------------
 .../alerts/definition_configs_controller.js     |  29 ++++++
 .../app/mappers/alert_definitions_mapper.js     |  36 ++++---
 ambari-web/app/models/alerts/alert_config.js    |  62 +++++++++++-
 .../app/models/alerts/alert_definition.js       |   4 +-
 ambari-web/app/styles/alerts.less               |   4 +
 .../alerts/configs/alert_config_parameter.hbs   |  33 ++++++
 .../main/alerts/definition_configs_view.js      |  10 ++
 .../definitions_configs_controller_test.js      |  44 +++++++-
 .../mappers/alert_definitions_mapper_test.js    |  45 ++++++++-
 .../test/models/alerts/alert_config_test.js     | 100 +++++++++++++++++++
 10 files changed, 341 insertions(+), 26 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6d9e0599/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/alerts/definition_configs_controller.js b/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
index 1b66f60..3fd5510 100644
--- a/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
+++ b/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
@@ -320,6 +320,23 @@ App.MainAlertDefinitionConfigsController = Em.Controller.extend({
       })
     ]);
 
+    var mixins = {
+      STRING: App.AlertConfigProperties.Parameters.StringMixin,
+      NUMERIC: App.AlertConfigProperties.Parameters.NumericMixin,
+      PERCENT: App.AlertConfigProperties.Parameters.PercentageMixin
+    };
+    alertDefinition.get('parameters').forEach(function (parameter) {
+      var mixin = mixins[parameter.get('type')] || {}; // validation depends on parameter-type
+      result.push(App.AlertConfigProperties.Parameter.create(mixin, {
+        value: isWizard ? '' : parameter.get('value'),
+        apiProperty: parameter.get('name'),
+        label: isWizard ? '' : parameter.get('displayName'),
+        threshold: isWizard ? '' : parameter.get('threshold'),
+        units: isWizard ? '' : parameter.get('units'),
+        type: isWizard ? '' : parameter.get('type'),
+      }));
+    });
+
     return result;
   },
 
@@ -478,6 +495,9 @@ App.MainAlertDefinitionConfigsController = Em.Controller.extend({
   getPropertiesToUpdate: function (onlyChanged) {
     var propertiesToUpdate = {};
     var configs = onlyChanged ? this.get('configs').filterProperty('wasChanged') : this.get('configs');
+    configs = configs.filter(function (c) {
+      return c.get('name') !== 'parameter';
+    });
     configs.forEach(function (property) {
       var apiProperties = property.get('apiProperty');
       var apiFormattedValues = property.get('apiFormattedValue');
@@ -521,6 +541,15 @@ App.MainAlertDefinitionConfigsController = Em.Controller.extend({
       }, this);
     }, this);
 
+    // `source.parameters` is an array and should be updated separately from other configs
+    if (this.get('content.parameters.length')) {
+      propertiesToUpdate['AlertDefinition/source/parameters'] = this.get('content.rawSourceData.parameters');
+      var parameterConfigs = this.get('configs').filterProperty('name', 'parameter');
+      parameterConfigs.forEach(function (parameter) {
+        propertiesToUpdate['AlertDefinition/source/parameters'].findProperty('name', parameter.get('apiProperty')).value = parameter.get('apiFormattedValue');
+      });
+    }
+
     return propertiesToUpdate;
   },
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6d9e0599/ambari-web/app/mappers/alert_definitions_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/alert_definitions_mapper.js b/ambari-web/app/mappers/alert_definitions_mapper.js
index b027d67..7aae518 100644
--- a/ambari-web/app/mappers/alert_definitions_mapper.js
+++ b/ambari-web/app/mappers/alert_definitions_mapper.js
@@ -17,8 +17,6 @@
 
 var App = require('app');
 
-var stringUtils = require('utils/string_utils');
-
 App.alertDefinitionsMapper = App.QuickDataMapper.create({
 
   model: App.AlertDefinition,
@@ -44,7 +42,7 @@ App.alertDefinitionsMapper = App.QuickDataMapper.create({
     reporting: {
       item: 'id'
     },
-    parameters_key: 'reporting',
+    parameters_key: 'parameters',
     parameters_type: 'array',
     parameters: {
       item: 'id'
@@ -76,21 +74,11 @@ App.alertDefinitionsMapper = App.QuickDataMapper.create({
     connection_timeout: 'AlertDefinition.source.uri.connection_timeout'
   },
 
-  parameterConfig: {
-    id: 'AlertDefinition.source.parameters.id',
-    name: 'AlertDefinition.source.parameters.name',
-    display_name: 'AlertDefinition.source.parameters.display_name',
-    units: 'AlertDefinition.source.parameters.units',
-    value: 'AlertDefinition.source.parameters.value',
-    description: 'AlertDefinition.source.parameters.description',
-    type: 'AlertDefinition.source.parameters.type',
-    threshold: 'AlertDefinition.source.parameters.threshold'
-  },
-
   map: function (json) {
     console.time('App.alertDefinitionsMapper execution time');
     if (json && json.items) {
       var self = this,
+          parameters = [],
           alertDefinitions = [],
           alertReportDefinitions = [],
           alertMetricsSourceDefinitions = [],
@@ -123,8 +111,27 @@ App.alertDefinitionsMapper = App.QuickDataMapper.create({
           }
         }
 
+        var convertedParameters = [];
+        var sourceParameters = item.AlertDefinition.source.parameters;
+        if (Array.isArray(sourceParameters)) {
+          sourceParameters.forEach(function (parameter) {
+            convertedParameters.push({
+              id: item.AlertDefinition.id + parameter.name,
+              name: parameter.name,
+              display_name: parameter.display_name,
+              units: parameter.units,
+              value: parameter.value,
+              description: parameter.description,
+              type: parameter.type,
+              threshold: parameter.threshold
+            });
+          });
+        }
+
         alertReportDefinitions = alertReportDefinitions.concat(convertedReportDefinitions);
+        parameters = parameters.concat(convertedParameters);
         item.reporting = convertedReportDefinitions;
+        item.parameters = convertedParameters;
 
         rawSourceData[item.AlertDefinition.id] = item.AlertDefinition.source;
         item.AlertDefinition.description = item.AlertDefinition.description || '';
@@ -207,6 +214,7 @@ App.alertDefinitionsMapper = App.QuickDataMapper.create({
 
       // load all mapped data to model
       App.store.loadMany(this.get('reportModel'), alertReportDefinitions);
+      App.store.loadMany(this.get('parameterModel'), parameters);
       App.store.loadMany(this.get('metricsSourceModel'), alertMetricsSourceDefinitions);
       this.setMetricsSourcePropertyLists(this.get('metricsSourceModel'), alertMetricsSourceDefinitions);
       App.store.loadMany(this.get('metricsUriModel'), alertMetricsUriDefinitions);

http://git-wip-us.apache.org/repos/asf/ambari/blob/6d9e0599/ambari-web/app/models/alerts/alert_config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/alerts/alert_config.js b/ambari-web/app/models/alerts/alert_config.js
index 4ef3edd..a9a8154 100644
--- a/ambari-web/app/models/alerts/alert_config.js
+++ b/ambari-web/app/models/alerts/alert_config.js
@@ -139,6 +139,8 @@ App.AlertConfigProperty = Ember.Object.extend({
         return App.AlertConfigThresholdView;
       case 'radioButton':
         return App.AlertConfigRadioButtonView;
+      case 'parameter':
+        return App.AlertConfigParameterView;
       default:
     }
   }.property('displayType'),
@@ -331,9 +333,7 @@ App.AlertConfigProperties = {
      * Custom css-class for different badges
      * type {string}
      */
-    badgeCssClass: function () {
-      return 'alert-state-' + this.get('badge');
-    }.property('badge'),
+    badgeCssClass: Em.computed.format('alert-state-{0}', 'badge'),
 
     /**
      * Determines if <code>value</code> or <code>text</code> were changed
@@ -476,10 +476,66 @@ App.AlertConfigProperties = {
     displayType: 'textArea',
     classNames: 'alert-config-text-area',
     apiProperty: Em.computed.ifThenElse('isJMXMetric', 'source.jmx.value', 'source.ganglia.value')
+  }),
+
+  Parameter: App.AlertConfigProperty.extend({
+
+    name: 'parameter',
+
+    displayType: 'parameter',
+
+    badge: Em.computed.alias('threshold'),
+
+    thresholdNotExists: Em.computed.empty('threshold'),
+
+    /**
+     * Custom css-class for different badges
+     * type {string}
+     */
+    badgeCssClass: Em.computed.format('alert-state-{0}', 'badge'),
+
   })
 
 };
+App.AlertConfigProperties.Parameters = {
 
+  StringMixin: Em.Mixin.create({
+    isValid: function () {
+      var value = this.get('value');
+      return String(value).trim() !== '';
+    }.property('value')
+  }),
+  NumericMixin: Em.Mixin.create({
+    isValid: function () {
+      var value = this.get('value');
+      if (!value) {
+        return false;
+      }
+      value = ('' + value).trim();
+      if (!numericUtils.isPositiveNumber(value)) {
+        return false;
+      }
+      value = parseFloat(value);
+      return !isNaN(value);
+    }.property('value')
+  }),
+  PercentageMixin: Em.Mixin.create({
+    isValid: function () {
+      var value = this.get('value');
+      if (!value) {
+        return false;
+      }
+      if (!validator.isValidFloat(value) || !numericUtils.isPositiveNumber(value)) {
+        return false;
+      }
+      value = String(value).trim();
+      value = parseFloat(value);
+
+      return !isNaN(value) && value > 0 && value <= 100;
+    }.property('value')
+  })
+
+};
 App.AlertConfigProperties.Thresholds = {
 
   OkThreshold: App.AlertConfigProperties.Threshold.extend({

http://git-wip-us.apache.org/repos/asf/ambari/blob/6d9e0599/ambari-web/app/models/alerts/alert_definition.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/alerts/alert_definition.js b/ambari-web/app/models/alerts/alert_definition.js
index 3f59e86..e91bd4f 100644
--- a/ambari-web/app/models/alerts/alert_definition.js
+++ b/ambari-web/app/models/alerts/alert_definition.js
@@ -315,8 +315,8 @@ App.AlertDefinition.reopenClass({
 App.AlertDefinitionParameter = DS.Model.extend({
   name: DS.attr('string'),
   displayName: DS.attr('string'),
-  unit: DS.attr('string'),
-  value: DS.attr('number'),
+  units: DS.attr('string'),
+  value: DS.attr('string'),
   description: DS.attr('string'),
   type: DS.attr('string'),
   threshold: DS.attr('string')

http://git-wip-us.apache.org/repos/asf/ambari/blob/6d9e0599/ambari-web/app/styles/alerts.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/alerts.less b/ambari-web/app/styles/alerts.less
index 2eabbe2..1063ecf 100644
--- a/ambari-web/app/styles/alerts.less
+++ b/ambari-web/app/styles/alerts.less
@@ -300,6 +300,10 @@
     width: 170px;
   }
 
+  .stuck-left {
+    margin-left: 0!important;
+  }
+
   .controls.shifted {
     margin-left: 190px;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/6d9e0599/ambari-web/app/templates/main/alerts/configs/alert_config_parameter.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/alerts/configs/alert_config_parameter.hbs b/ambari-web/app/templates/main/alerts/configs/alert_config_parameter.hbs
new file mode 100644
index 0000000..fffa7bd
--- /dev/null
+++ b/ambari-web/app/templates/main/alerts/configs/alert_config_parameter.hbs
@@ -0,0 +1,33 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+<div>
+  {{#if view.property.threshold}}
+    <div class="span2 badge-container">
+      <span {{bindAttr class="view.property.badgeCssClass :alert-parameter-badge :alert-state-single-host view.property.threshold:label"}}>
+        {{view.property.badge}}
+        </span>&nbsp;
+    </div>
+  {{/if}}
+  <div {{bindAttr class="view.bigInput:span12:span3 view.property.units:input-append view.property.thresholdNotExists:stuck-left"}}>
+    {{view Em.TextField valueBinding="view.property.value" disabledBinding="view.property.isDisabled" class ="view.bigInput:span12:span7"}}
+    {{#if view.property.units}}
+      <span class="add-on">{{view.property.units}}</span>
+    {{/if}}
+  </div>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/6d9e0599/ambari-web/app/views/main/alerts/definition_configs_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/alerts/definition_configs_view.js b/ambari-web/app/views/main/alerts/definition_configs_view.js
index d909367..00e26d4 100644
--- a/ambari-web/app/views/main/alerts/definition_configs_view.js
+++ b/ambari-web/app/views/main/alerts/definition_configs_view.js
@@ -93,3 +93,13 @@ App.AlertConfigRadioButtonView = Em.Checkbox.extend({
 
   classNameBindings: ['property.classNames']
 });
+
+App.AlertConfigParameterView = Em.View.extend({
+
+  templateName: require('templates/main/alerts/configs/alert_config_parameter'),
+
+  bigInput: Em.computed.equal('property.type', 'STRING'),
+
+  classNameBindings: ['property.classNames', 'parentView.basicClass']
+
+});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/6d9e0599/ambari-web/test/controllers/main/alerts/definitions_configs_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/alerts/definitions_configs_controller_test.js b/ambari-web/test/controllers/main/alerts/definitions_configs_controller_test.js
index ae25d2d..4061f35 100644
--- a/ambari-web/test/controllers/main/alerts/definitions_configs_controller_test.js
+++ b/ambari-web/test/controllers/main/alerts/definitions_configs_controller_test.js
@@ -249,6 +249,10 @@ describe('App.MainAlertDefinitionConfigsController', function () {
         scope: 'HOST',
         description: 'alertDefinitionDescription',
         interval: 60,
+        parameters: [
+          Em.Object.create({}),
+          Em.Object.create({}),
+        ],
         reporting: [
           Em.Object.create({
             type: 'warning',
@@ -270,13 +274,13 @@ describe('App.MainAlertDefinitionConfigsController', function () {
     it('isWizard = true', function () {
       controller.set('isWizard', true);
       var result = controller.renderScriptConfigs();
-      expect(result.length).to.equal(8);
+      expect(result.length).to.equal(10);
     });
 
     it('isWizard = false', function () {
       controller.set('isWizard', false);
       var result = controller.renderScriptConfigs();
-      expect(result.length).to.equal(2);
+      expect(result.length).to.equal(4);
     });
 
   });
@@ -477,6 +481,42 @@ describe('App.MainAlertDefinitionConfigsController', function () {
         expect(result).to.eql(testCase.result);
       });
     });
+
+    describe('`source/parameters` for SCRIPT configs', function () {
+
+      beforeEach(function () {
+        controller.set('content', Em.Object.create({
+          parameters: [
+            Em.Object.create({name: 'p1', value: 'v1'}),
+            Em.Object.create({name: 'p2', value: 'v2'}),
+            Em.Object.create({name: 'p3', value: 'v3'}),
+            Em.Object.create({name: 'p4', value: 'v4'})
+          ],
+          rawSourceData: {
+            parameters: [
+              {name: 'p1', value: 'v1'},
+              {name: 'p2', value: 'v2'},
+              {name: 'p3', value: 'v3'},
+              {name: 'p4', value: 'v4'}
+            ]
+          }
+        }));
+        controller.set('configs', [
+          Em.Object.create({apiProperty:'p1', apiFormattedValue: 'v11', wasChanged: true, name: 'parameter'}),
+          Em.Object.create({apiProperty:'p2', apiFormattedValue: 'v21', wasChanged: true, name: 'parameter'}),
+          Em.Object.create({apiProperty:'p3', apiFormattedValue: 'v31', wasChanged: true, name: 'parameter'}),
+          Em.Object.create({apiProperty:'p4', apiFormattedValue: 'v41', wasChanged: true, name: 'parameter'})
+        ]);
+        this.result = controller.getPropertiesToUpdate();
+      });
+
+      it('should update parameters', function () {
+        expect(this.result['AlertDefinition/source/parameters']).to.have.property('length').equal(4);
+        expect(this.result['AlertDefinition/source/parameters'].mapProperty('value')).to.be.eql(['v11', 'v21', 'v31', 'v41']);
+      });
+
+    });
+
   });
 
   describe('#changeType()', function () {

http://git-wip-us.apache.org/repos/asf/ambari/blob/6d9e0599/ambari-web/test/mappers/alert_definitions_mapper_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/mappers/alert_definitions_mapper_test.js b/ambari-web/test/mappers/alert_definitions_mapper_test.js
index 6626187..564bf1d 100644
--- a/ambari-web/test/mappers/alert_definitions_mapper_test.js
+++ b/ambari-web/test/mappers/alert_definitions_mapper_test.js
@@ -21,7 +21,8 @@ require('mappers/alert_definitions_mapper');
 var testHelpers = require('test/helpers');
 
 describe('App.alertDefinitionsMapper', function () {
-  describe.skip('#map', function () {
+  /*eslint-disable mocha-cleanup/asserts-limit */
+  describe('#map', function () {
 
     var json = {
       items: [
@@ -148,6 +149,17 @@ describe('App.alertDefinitionsMapper', function () {
             "scope" : "HOST",
             "service_name" : "YARN",
             "source" : {
+              "parameters" : [
+                {
+                  "name" : "connection.timeout",
+                  "display_name" : "Connection Timeout",
+                  "units" : "seconds",
+                  "value" : 5.0,
+                  "description" : "The maximum time before this alert is considered to be CRITICAL",
+                  "type" : "NUMERIC",
+                  "threshold" : "CRITICAL"
+                }
+              ],
               "path" : "HDP/2.0.6/services/YARN/package/files/alert_nodemanager_health.py",
               "type" : "SCRIPT"
             }
@@ -187,7 +199,7 @@ describe('App.alertDefinitionsMapper', function () {
 
       App.alertDefinitionsMapper.setProperties({
         'model': {},
-
+        'parameterModel': {},
         'reportModel': {},
         'metricsSourceModel': {},
         'metricsUriModel': {}
@@ -352,7 +364,7 @@ describe('App.alertDefinitionsMapper', function () {
 
     });
 
-    it('should parse SCRIPT alertDefinitions', function () {
+    describe('should parse SCRIPT alertDefinitions', function () {
 
       var data = {items: [json.items[3]]},
         expected = [
@@ -370,9 +382,29 @@ describe('App.alertDefinitionsMapper', function () {
             "location":"HDP/2.0.6/services/YARN/package/files/alert_nodemanager_health.py"
           }
         ];
-      App.alertDefinitionsMapper.map(data);
 
-      testHelpers.nestedExpect(expected, App.alertDefinitionsMapper.get('model.content'));
+      var expectedParameters = [{
+        "id": "4connection.timeout",
+        "name": "connection.timeout",
+        "display_name": "Connection Timeout",
+        "units": "seconds",
+        "value": 5,
+        "description": "The maximum time before this alert is considered to be CRITICAL",
+        "type": "NUMERIC",
+        "threshold": "CRITICAL"
+      }];
+
+      beforeEach(function () {
+        App.alertDefinitionsMapper.map(data);
+      });
+
+      it('should map definition', function () {
+        testHelpers.nestedExpect(expected, App.alertDefinitionsMapper.get('model.content'));
+      });
+
+      it('should map parameters', function () {
+        testHelpers.nestedExpect(expectedParameters, App.alertDefinitionsMapper.get('parameterModel.content'));
+      });
 
     });
 
@@ -401,6 +433,7 @@ describe('App.alertDefinitionsMapper', function () {
 
     });
 
+    /*eslint-disable mocha-cleanup/complexity-it */
     it('should set groups from App.cache.previousAlertGroupsMap', function () {
 
       App.cache.previousAlertGroupsMap = {
@@ -421,6 +454,7 @@ describe('App.alertDefinitionsMapper', function () {
 
 
     });
+    /*eslint-enable mocha-cleanup/complexity-it */
 
     describe('should delete not existing definitions', function () {
 
@@ -450,5 +484,6 @@ describe('App.alertDefinitionsMapper', function () {
     });
 
   });
+  /*eslint-enable mocha-cleanup/asserts-limit */
 
 });
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/6d9e0599/ambari-web/test/models/alerts/alert_config_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/models/alerts/alert_config_test.js b/ambari-web/test/models/alerts/alert_config_test.js
index 236fcde..4b788f8 100644
--- a/ambari-web/test/models/alerts/alert_config_test.js
+++ b/ambari-web/test/models/alerts/alert_config_test.js
@@ -24,6 +24,106 @@ var model;
 
 describe('App.AlertConfigProperties', function () {
 
+  describe('Parameter', function () {
+
+    function getModel() {
+      return App.AlertConfigProperties.Parameter.create();
+    }
+
+    App.TestAliases.testAsComputedAlias(getModel(), 'badge', 'threshold');
+
+  });
+
+  describe('App.AlertConfigProperties.Parameters', function () {
+
+    describe('StringMixin', function () {
+
+      var obj;
+
+      beforeEach(function () {
+        obj = App.AlertConfigProperties.Parameter.create(App.AlertConfigProperties.Parameters.StringMixin, {});
+      });
+
+      describe('#isValid', function () {
+        Em.A([
+          {value: '', expected: false},
+          {value: '\t', expected: false},
+          {value: '    ', expected: false},
+          {value: '\n', expected: false},
+          {value: '\r', expected: false},
+          {value: 'some not empty string', expected: true}
+        ]).forEach(function (test) {
+          it('value: ' + JSON.stringify(test.value) + ' ;result - ' + test.expected, function () {
+            obj.set('value', test.value);
+            expect(obj.get('isValid')).to.be.equal(test.expected);
+          });
+        });
+      });
+
+    });
+
+    describe('NumericMixin', function () {
+
+      var obj;
+
+      beforeEach(function () {
+        obj = App.AlertConfigProperties.Parameter.create(App.AlertConfigProperties.Parameters.NumericMixin, {});
+      });
+
+      describe('#isValid', function () {
+        Em.A([
+          {value: '', expected: false},
+          {value: 'abc', expected: false},
+          {value: 'g1', expected: false},
+          {value: '1g', expected: false},
+          {value: '123', expected: true},
+          {value: '123.8', expected: true},
+          {value: 123, expected: true},
+          {value: 123.8, expected: true},
+        ]).forEach(function (test) {
+          it('value: ' + JSON.stringify(test.value) + ' ;result - ' + test.expected, function () {
+            obj.set('value', test.value);
+            expect(obj.get('isValid')).to.be.equal(test.expected);
+          });
+        });
+      });
+
+    });
+
+    describe('PercentageMixin', function () {
+
+      var obj;
+
+      beforeEach(function () {
+        obj = App.AlertConfigProperties.Parameter.create(App.AlertConfigProperties.Parameters.PercentageMixin, {});
+      });
+
+      describe('#isValid', function () {
+        Em.A([
+          {value: '', expected: false},
+          {value: 'abc', expected: false},
+          {value: 'g1', expected: false},
+          {value: '1g', expected: false},
+          {value: '123', expected: false},
+          {value: '23', expected: true},
+          {value: '123.8', expected: false},
+          {value: '5.8', expected: true},
+          {value: 123, expected: false},
+          {value: 23, expected: true},
+          {value: 123.8, expected: false},
+          {value: 5.8, expected: true}
+        ]).forEach(function (test) {
+          it('value: ' + JSON.stringify(test.value) + ' ;result - ' + test.expected, function () {
+            obj.set('value', test.value);
+            expect(obj.get('isValid')).to.be.equal(test.expected);
+          });
+        });
+      });
+
+    });
+
+  });
+
   describe('Threshold', function () {
 
     beforeEach(function () {


[06/16] ambari git commit: AMBARI-14882. AMS aggregates Counter values as average over the timeseries (and other issues). (swagle)

Posted by nc...@apache.org.
AMBARI-14882. AMS aggregates Counter values as average over the timeseries (and other issues). (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/58b91c84
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/58b91c84
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/58b91c84

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 58b91c845e6a260602373499c22a384f4ca7cbdb
Parents: 7632b7b
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Tue Feb 2 16:38:03 2016 -0800
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Tue Feb 2 16:38:03 2016 -0800

----------------------------------------------------------------------
 .../timeline/AbstractTimelineMetricsSink.java   |   4 +-
 .../metrics2/sink/timeline/TimelineMetric.java  |  13 ++-
 .../sink/timeline/TimelineMetricMetadata.java   |  15 +--
 .../timeline/cache/TimelineMetricsCache.java    |   9 +-
 .../sink/flume/FlumeTimelineMetricsSink.java    |   7 +-
 .../timeline/HadoopTimelineMetricsSink.java     |   6 +-
 .../timeline/HadoopTimelineMetricsSinkTest.java |   5 +-
 .../kafka/KafkaTimelineMetricsReporter.java     |  16 +--
 .../storm/StormTimelineMetricsReporter.java     |   2 -
 .../timeline/HBaseTimelineMetricStore.java      |  19 ++--
 .../metrics/timeline/PhoenixHBaseAccessor.java  | 111 +++++++------------
 .../metrics/timeline/aggregators/Function.java  |  75 ++++++++++---
 .../aggregators/TimelineMetricReadHelper.java   |  38 +++++++
 .../TimelineMetricMetadataManager.java          |   5 +-
 .../metrics/timeline/FunctionTest.java          |  10 +-
 .../timeline/HBaseTimelineMetricStoreTest.java  |  31 +++++-
 16 files changed, 233 insertions(+), 133 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/58b91c84/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
index 6d7c55f..9173889 100644
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
+++ b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
@@ -37,6 +37,7 @@ public abstract class AbstractTimelineMetricsSink {
   public static final String COLLECTOR_HOST_PROPERTY = "collector";
   public static final String COLLECTOR_PORT_PROPERTY = "port";
   public static final int DEFAULT_POST_TIMEOUT_SECONDS = 10;
+  public static final String SKIP_COUNTER_TRANSFROMATION = "skipCounterDerivative";
 
   protected final Log LOG;
 
@@ -60,8 +61,7 @@ public abstract class AbstractTimelineMetricsSink {
     try {
       String jsonData = mapper.writeValueAsString(metrics);
 
-      HttpURLConnection connection =
-        (HttpURLConnection) new URL(connectUrl).openConnection();
+      HttpURLConnection connection = (HttpURLConnection) new URL(connectUrl).openConnection();
 
       connection.setRequestMethod("POST");
       connection.setRequestProperty("Content-Type", "application/json");

http://git-wip-us.apache.org/repos/asf/ambari/blob/58b91c84/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetric.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetric.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetric.java
index e4dc423..98f4978 100644
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetric.java
+++ b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetric.java
@@ -42,6 +42,7 @@ public class TimelineMetric implements Comparable<TimelineMetric> {
   private long timestamp;
   private long startTime;
   private String type;
+  private String units;
   private TreeMap<Long, Double> metricValues = new TreeMap<Long, Double>();
 
   // default
@@ -53,6 +54,7 @@ public class TimelineMetric implements Comparable<TimelineMetric> {
   public TimelineMetric(TimelineMetric metric) {
     setMetricName(metric.getMetricName());
     setType(metric.getType());
+    setUnits(metric.getUnits());
     setTimestamp(metric.getTimestamp());
     setAppId(metric.getAppId());
     setInstanceId(metric.getInstanceId());
@@ -115,7 +117,7 @@ public class TimelineMetric implements Comparable<TimelineMetric> {
     this.startTime = startTime;
   }
 
-  @XmlElement(name = "type")
+  @XmlElement(name = "type", defaultValue = "UNDEFINED")
   public String getType() {
     return type;
   }
@@ -124,6 +126,15 @@ public class TimelineMetric implements Comparable<TimelineMetric> {
     this.type = type;
   }
 
+  @XmlElement(name = "units")
+  public String getUnits() {
+    return units;
+  }
+
+  public void setUnits(String units) {
+    this.units = units;
+  }
+
   @XmlElement(name = "metrics")
   public TreeMap<Long, Double> getMetricValues() {
     return metricValues;

http://git-wip-us.apache.org/repos/asf/ambari/blob/58b91c84/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetricMetadata.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetricMetadata.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetricMetadata.java
index 0624f9c..1f413a0 100644
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetricMetadata.java
+++ b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetricMetadata.java
@@ -33,16 +33,17 @@ public class TimelineMetricMetadata {
   private String metricName;
   private String appId;
   private String units;
-  private MetricType type = MetricType.UNDEFINED;
+  private String type = "UNDEFINED";
   private Long seriesStartTime;
   boolean supportsAggregates = true;
   // Serialization ignored helper flag
   boolean isPersisted = false;
 
+  // Placeholder to add more type later
   public enum MetricType {
-    GAUGE, // Can vary in both directions
-    COUNTER, // Single dimension
-    UNDEFINED // Default
+    GAUGE,
+    COUNTER,
+    UNDEFINED
   }
 
   // Default constructor
@@ -50,7 +51,7 @@ public class TimelineMetricMetadata {
   }
 
   public TimelineMetricMetadata(String metricName, String appId, String units,
-                                MetricType type, Long seriesStartTime,
+                                String type, Long seriesStartTime,
                                 boolean supportsAggregates) {
     this.metricName = metricName;
     this.appId = appId;
@@ -89,11 +90,11 @@ public class TimelineMetricMetadata {
   }
 
   @XmlElement(name = "type")
-  public MetricType getType() {
+  public String getType() {
     return type;
   }
 
-  public void setType(MetricType type) {
+  public void setType(String type) {
     this.type = type;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/58b91c84/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/cache/TimelineMetricsCache.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/cache/TimelineMetricsCache.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/cache/TimelineMetricsCache.java
index 4e9e36e..15bd5f4 100644
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/cache/TimelineMetricsCache.java
+++ b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/cache/TimelineMetricsCache.java
@@ -38,11 +38,18 @@ public class TimelineMetricsCache {
   public static final int MAX_EVICTION_TIME_MILLIS = 59000; // ~ 1 min
   private final int maxRecsPerName;
   private final int maxEvictionTimeInMillis;
+  private boolean skipCounterTransform = true;
   private final Map<String, Double> counterMetricLastValue = new HashMap<String, Double>();
 
   public TimelineMetricsCache(int maxRecsPerName, int maxEvictionTimeInMillis) {
+    this(maxRecsPerName, maxEvictionTimeInMillis, false);
+  }
+
+  public TimelineMetricsCache(int maxRecsPerName, int maxEvictionTimeInMillis,
+                              boolean skipCounterTransform) {
     this.maxRecsPerName = maxRecsPerName;
     this.maxEvictionTimeInMillis = maxEvictionTimeInMillis;
+    this.skipCounterTransform = skipCounterTransform;
   }
 
   class TimelineMetricWrapper {
@@ -171,7 +178,7 @@ public class TimelineMetricsCache {
   }
 
   public void putTimelineMetric(TimelineMetric timelineMetric, boolean isCounter) {
-    if (isCounter) {
+    if (isCounter && !skipCounterTransform) {
       transformMetricValuesToDerivative(timelineMetric);
     }
     putTimelineMetric(timelineMetric);

http://git-wip-us.apache.org/repos/asf/ambari/blob/58b91c84/ambari-metrics/ambari-metrics-flume-sink/src/main/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-flume-sink/src/main/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java b/ambari-metrics/ambari-metrics-flume-sink/src/main/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java
index 0257ada..cf2b4ae 100644
--- a/ambari-metrics/ambari-metrics-flume-sink/src/main/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java
+++ b/ambari-metrics/ambari-metrics-flume-sink/src/main/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java
@@ -135,12 +135,11 @@ public class FlumeTimelineMetricsSink extends AbstractTimelineMetricsSink implem
     public void run() {
       LOG.debug("Collecting Metrics for Flume");
       try {
-        Map<String, Map<String, String>> metricsMap =
-            JMXPollUtil.getAllMBeans();
+        Map<String, Map<String, String>> metricsMap = JMXPollUtil.getAllMBeans();
         long currentTimeMillis = System.currentTimeMillis();
         for (String component : metricsMap.keySet()) {
           Map<String, String> attributeMap = metricsMap.get(component);
-          LOG.info("Attributes for component " + component);
+          LOG.debug("Attributes for component " + component);
           processComponentAttributes(currentTimeMillis, component, attributeMap);
         }
       } catch (UnableToConnectException uce) {
@@ -188,8 +187,6 @@ public class FlumeTimelineMetricsSink extends AbstractTimelineMetricsSink implem
       timelineMetric.setInstanceId(component);
       timelineMetric.setAppId("FLUME_HANDLER");
       timelineMetric.setStartTime(currentTimeMillis);
-      timelineMetric.setType(ClassUtils.getShortCanonicalName(
-          attributeValue, "Number"));
       timelineMetric.getMetricValues().put(currentTimeMillis, Double.parseDouble(attributeValue));
       return timelineMetric;
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/58b91c84/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java b/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
index f23dc42..000b82e 100644
--- a/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
+++ b/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
@@ -96,7 +96,9 @@ public class HadoopTimelineMetricsSink extends AbstractTimelineMetricsSink imple
       TimelineMetricsCache.MAX_RECS_PER_NAME_DEFAULT);
     int metricsSendInterval = conf.getInt(METRICS_SEND_INTERVAL,
       TimelineMetricsCache.MAX_EVICTION_TIME_MILLIS); // ~ 1 min
-    metricsCache = new TimelineMetricsCache(maxRowCacheSize, metricsSendInterval);
+    // Skip aggregation of counter values by calculating derivative
+    metricsCache = new TimelineMetricsCache(maxRowCacheSize,
+      metricsSendInterval, conf.getBoolean(SKIP_COUNTER_TRANSFROMATION, true));
 
     conf.setListDelimiter(',');
     Iterator<String> it = (Iterator<String>) conf.getKeys();
@@ -186,7 +188,7 @@ public class HadoopTimelineMetricsSink extends AbstractTimelineMetricsSink imple
         timelineMetric.setHostName(hostName);
         timelineMetric.setAppId(serviceName);
         timelineMetric.setStartTime(startTime);
-        timelineMetric.setType(ClassUtils.getShortCanonicalName(value, "Number"));
+        timelineMetric.setType(metric.type() != null ? metric.type().name() : null);
         timelineMetric.getMetricValues().put(startTime, value.doubleValue());
         // Put intermediate values into the cache until it is time to send
         boolean isCounter = MetricType.COUNTER == metric.type();

http://git-wip-us.apache.org/repos/asf/ambari/blob/58b91c84/ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java b/ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java
index a69b7c7..6b23f36 100644
--- a/ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java
+++ b/ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java
@@ -40,6 +40,7 @@ import java.util.List;
 
 import org.apache.commons.configuration.SubsetConfiguration;
 import org.apache.hadoop.metrics2.AbstractMetric;
+import org.apache.hadoop.metrics2.MetricType;
 import org.apache.hadoop.metrics2.MetricsRecord;
 import org.easymock.EasyMock;
 import org.easymock.IAnswer;
@@ -90,6 +91,7 @@ public class HadoopTimelineMetricsSinkTest {
     AbstractMetric metric = createNiceMock(AbstractMetric.class);
     expect(metric.name()).andReturn("metricName").anyTimes();
     expect(metric.value()).andReturn(9.5687).anyTimes();
+    expect(metric.type()).andReturn(MetricType.COUNTER).anyTimes();
     //TODO currently only numeric metrics are supported
 
     MetricsRecord record = createNiceMock(MetricsRecord.class);
@@ -104,7 +106,6 @@ public class HadoopTimelineMetricsSinkTest {
 
     expect(record.metrics()).andReturn(Arrays.asList(metric)).anyTimes();
 
-
     replay(conf, record, metric);
 
     sink.init(conf);
@@ -239,4 +240,6 @@ public class HadoopTimelineMetricsSinkTest {
     Assert.assertEquals(new Double(5.0), values.next());
     Assert.assertEquals(new Double(6.0), values.next());
   }
+
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/58b91c84/ambari-metrics/ambari-metrics-kafka-sink/src/main/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporter.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-kafka-sink/src/main/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporter.java b/ambari-metrics/ambari-metrics-kafka-sink/src/main/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporter.java
index ff2db1d..4915435 100644
--- a/ambari-metrics/ambari-metrics-kafka-sink/src/main/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporter.java
+++ b/ambari-metrics/ambari-metrics-kafka-sink/src/main/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporter.java
@@ -42,7 +42,6 @@ import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.hadoop.metrics2.sink.timeline.cache.TimelineMetricsCache;
-
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
@@ -51,7 +50,7 @@ import java.util.List;
 import java.util.Map.Entry;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
-
+import static org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata.MetricType;
 import static org.apache.hadoop.metrics2.sink.timeline.cache.TimelineMetricsCache.MAX_EVICTION_TIME_MILLIS;
 import static org.apache.hadoop.metrics2.sink.timeline.cache.TimelineMetricsCache.MAX_RECS_PER_NAME_DEFAULT;
 
@@ -280,7 +279,7 @@ public class KafkaTimelineMetricsReporter extends AbstractTimelineMetricsSink
 
       String[] metricNames = cacheKafkaMetered(currentTimeMillis, sanitizedName, meter);
 
-      populateMetricsList(context, metricNames);
+      populateMetricsList(context, MetricType.GAUGE, metricNames);
     }
 
     @Override
@@ -291,7 +290,7 @@ public class KafkaTimelineMetricsReporter extends AbstractTimelineMetricsSink
       final String metricCountName = cacheSanitizedTimelineMetric(currentTimeMillis, sanitizedName,
           COUNT_SUFIX, counter.count());
 
-      populateMetricsList(context, metricCountName);
+      populateMetricsList(context, MetricType.COUNTER, metricCountName);
     }
 
     @Override
@@ -305,7 +304,7 @@ public class KafkaTimelineMetricsReporter extends AbstractTimelineMetricsSink
 
       String[] metricNames = (String[]) ArrayUtils.addAll(metricHNames, metricSNames);
 
-      populateMetricsList(context, metricNames);
+      populateMetricsList(context, MetricType.GAUGE, metricNames);
     }
 
     @Override
@@ -321,7 +320,7 @@ public class KafkaTimelineMetricsReporter extends AbstractTimelineMetricsSink
       String[] metricNames = (String[]) ArrayUtils.addAll(metricMNames, metricTNames);
       metricNames = (String[]) ArrayUtils.addAll(metricNames, metricSNames);
 
-      populateMetricsList(context, metricNames);
+      populateMetricsList(context, MetricType.GAUGE, metricNames);
     }
 
     @Override
@@ -331,7 +330,7 @@ public class KafkaTimelineMetricsReporter extends AbstractTimelineMetricsSink
 
       cacheSanitizedTimelineMetric(currentTimeMillis, sanitizedName, "", Double.parseDouble(String.valueOf(gauge.value())));
 
-      populateMetricsList(context, sanitizedName);
+      populateMetricsList(context, MetricType.GAUGE, sanitizedName);
     }
 
     private String[] cacheKafkaMetered(long currentTimeMillis, String sanitizedName, Metered meter) {
@@ -393,10 +392,11 @@ public class KafkaTimelineMetricsReporter extends AbstractTimelineMetricsSink
       return meterName;
     }
 
-    private void populateMetricsList(Context context, String... metricNames) {
+    private void populateMetricsList(Context context, MetricType type, String... metricNames) {
       for (String metricName : metricNames) {
         TimelineMetric cachedMetric = metricsCache.getTimelineMetric(metricName);
         if (cachedMetric != null) {
+          cachedMetric.setType(type.name());
           context.getTimelineMetricList().add(cachedMetric);
         }
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/58b91c84/ambari-metrics/ambari-metrics-storm-sink/src/main/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsReporter.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-storm-sink/src/main/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsReporter.java b/ambari-metrics/ambari-metrics-storm-sink/src/main/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsReporter.java
index 73e3de8..f054f16 100644
--- a/ambari-metrics/ambari-metrics-storm-sink/src/main/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsReporter.java
+++ b/ambari-metrics/ambari-metrics-storm-sink/src/main/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsReporter.java
@@ -153,8 +153,6 @@ public class StormTimelineMetricsReporter extends AbstractTimelineMetricsSink
     timelineMetric.setHostName(hostname);
     timelineMetric.setAppId(component);
     timelineMetric.setStartTime(currentTimeMillis);
-    timelineMetric.setType(ClassUtils.getShortCanonicalName(
-      attributeValue, "Number"));
     timelineMetric.getMetricValues().put(currentTimeMillis, Double.parseDouble(attributeValue));
     return timelineMetric;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/58b91c84/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
index c30a354..5ee8b44 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
@@ -202,16 +202,18 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin
     for (TimelineMetric metric : metricsList){
       String name = metric.getMetricName();
       if (name.contains("._rate")){
-        updateValueAsRate(metric.getMetricValues());
+        updateValuesAsRate(metric.getMetricValues());
       }
     }
 
     return metrics;
   }
 
-  private Map<Long, Double> updateValueAsRate(Map<Long, Double> metricValues) {
+  static Map<Long, Double> updateValuesAsRate(Map<Long, Double> metricValues) {
     Long prevTime = null;
+    Double prevVal = null;
     long step;
+    Double diff;
 
     for (Map.Entry<Long, Double> timeValueEntry : metricValues.entrySet()) {
       Long currTime = timeValueEntry.getKey();
@@ -219,21 +221,22 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin
 
       if (prevTime != null) {
         step = currTime - prevTime;
-        Double rate = currVal / TimeUnit.MILLISECONDS.toSeconds(step);
+        diff = currVal - prevVal;
+        Double rate = diff / TimeUnit.MILLISECONDS.toSeconds(step);
         timeValueEntry.setValue(rate);
       } else {
         timeValueEntry.setValue(0.0);
       }
 
       prevTime = currTime;
+      prevVal = currVal;
     }
 
     return metricValues;
   }
 
-  public static HashMap<String, List<Function>> parseMetricNamesToAggregationFunctions(List<String> metricNames) {
-    HashMap<String, List<Function>> metricsFunctions = new HashMap<String,
-      List<Function>>();
+  static HashMap<String, List<Function>> parseMetricNamesToAggregationFunctions(List<String> metricNames) {
+    HashMap<String, List<Function>> metricsFunctions = new HashMap<>();
 
     for (String metricName : metricNames){
       Function function = Function.DEFAULT_VALUE_FUNCTION;
@@ -242,7 +245,7 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin
       try {
         function = Function.fromMetricName(metricName);
         int functionStartIndex = metricName.indexOf("._");
-        if(functionStartIndex > 0 ) {
+        if (functionStartIndex > 0) {
           cleanMetricName = metricName.substring(0, functionStartIndex);
         }
       } catch (Function.FunctionFormatException ffe){
@@ -252,7 +255,7 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin
 
       List<Function> functionsList = metricsFunctions.get(cleanMetricName);
       if (functionsList == null) {
-        functionsList = new ArrayList<Function>(1);
+        functionsList = new ArrayList<>(1);
       }
       functionsList.add(function);
       metricsFunctions.put(cleanMetricName, functionsList);

http://git-wip-us.apache.org/repos/asf/ambari/blob/58b91c84/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
index 980c4af..4149e8d 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
 
-import com.google.common.base.Enums;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -66,7 +65,6 @@ import java.util.TreeMap;
 import java.util.concurrent.TimeUnit;
 
 import static java.util.concurrent.TimeUnit.SECONDS;
-import static org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata.*;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.AGGREGATE_TABLE_SPLIT_POINTS;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.AGGREGATORS_SKIP_BLOCK_CACHE;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.CLUSTER_DAILY_TABLE_TTL;
@@ -166,51 +164,12 @@ public class PhoenixHBaseAccessor {
   }
 
   private static TimelineMetric getLastTimelineMetricFromResultSet(ResultSet rs)
-    throws SQLException, IOException {
+      throws SQLException, IOException {
     TimelineMetric metric = TIMELINE_METRIC_READ_HELPER.getTimelineMetricCommonsFromResultSet(rs);
     metric.setMetricValues(readLastMetricValueFromJSON(rs.getString("METRICS")));
     return metric;
   }
 
-  public static SingleValuedTimelineMetric getAggregatedTimelineMetricFromResultSet(
-      ResultSet rs, Function f) throws SQLException, IOException {
-
-    SingleValuedTimelineMetric metric = new SingleValuedTimelineMetric(
-      rs.getString("METRIC_NAME") + f.getSuffix(),
-      rs.getString("APP_ID"),
-      rs.getString("INSTANCE_ID"),
-      rs.getString("HOSTNAME"),
-      rs.getLong("SERVER_TIME"),
-      rs.getLong("SERVER_TIME"),
-      rs.getString("UNITS")
-    );
-
-    // get functions for metricnames
-
-    double value;
-    switch(f.getReadFunction()){
-      case AVG:
-        value = rs.getDouble("METRIC_SUM") / rs.getInt("METRIC_COUNT");
-        break;
-      case MIN:
-        value = rs.getDouble("METRIC_MIN");
-        break;
-      case MAX:
-        value = rs.getDouble("METRIC_MAX");
-        break;
-      case SUM:
-        value = rs.getDouble("METRIC_SUM");
-        break;
-      default:
-        value = rs.getDouble("METRIC_SUM") / rs.getInt("METRIC_COUNT");
-        break;
-    }
-
-    metric.setSingleTimeseriesValue(rs.getLong("SERVER_TIME"), value);
-
-    return metric;
-  }
-
   private static TreeMap<Long, Double> readLastMetricValueFromJSON(String json)
       throws IOException {
     TreeMap<Long, Double> values = readMetricFromJSON(json);
@@ -436,7 +395,7 @@ public class PhoenixHBaseAccessor {
         metricRecordStmt.setString(4, metric.getInstanceId());
         metricRecordStmt.setLong(5, currentTime);
         metricRecordStmt.setLong(6, metric.getStartTime());
-        metricRecordStmt.setString(7, metric.getType());
+        metricRecordStmt.setString(7, metric.getUnits());
         metricRecordStmt.setDouble(8, aggregates[0]);
         metricRecordStmt.setDouble(9, aggregates[1]);
         metricRecordStmt.setDouble(10, aggregates[2]);
@@ -498,7 +457,7 @@ public class PhoenixHBaseAccessor {
 
     try {
       //get latest
-      if(condition.isPointInTime()){
+      if (condition.isPointInTime()){
         getLatestMetricRecords(condition, conn, metrics);
       } else {
         if (condition.getEndTime() >= condition.getStartTime()) {
@@ -580,19 +539,24 @@ public class PhoenixHBaseAccessor {
     return metrics;
   }
 
-  private void appendMetricFromResultSet(
-      TimelineMetrics metrics, Condition condition, Map<String,
-      List<Function>> metricFunctions, ResultSet rs)
-      throws SQLException, IOException {
-    if (condition.getPrecision() == Precision.HOURS
-      || condition.getPrecision() == Precision.MINUTES
-      || condition.getPrecision() == Precision.DAYS) {
-
-      String metricName = rs.getString("METRIC_NAME");
-      List<Function> functions = metricFunctions.get(metricName);
+  /**
+   * Apply aggregate function to the result if supplied else get precision
+   * or aggregate data with default function applied.
+   */
+  private void appendMetricFromResultSet(TimelineMetrics metrics, Condition condition,
+                                         Map<String, List<Function>> metricFunctions,
+                                         ResultSet rs) throws SQLException, IOException {
+    String metricName = rs.getString("METRIC_NAME");
+    List<Function> functions = metricFunctions.get(metricName);
 
+    // Apply aggregation function if present
+    if (functions != null && !functions.isEmpty()) {
+      if (functions.size() > 1) {
+        throw new IllegalArgumentException("Multiple aggregate functions not supported.");
+      }
       for (Function f : functions) {
-        SingleValuedTimelineMetric metric = getAggregatedTimelineMetricFromResultSet(rs, f);
+        SingleValuedTimelineMetric metric =
+          TIMELINE_METRIC_READ_HELPER.getAggregatedTimelineMetricFromResultSet(rs, f);
 
         if (condition.isGrouped()) {
           metrics.addOrMergeTimelineMetric(metric);
@@ -600,28 +564,35 @@ public class PhoenixHBaseAccessor {
           metrics.getMetrics().add(metric.getTimelineMetric());
         }
       }
-    }
-    else {
-      TimelineMetric metric = TIMELINE_METRIC_READ_HELPER.getTimelineMetricFromResultSet(rs);
+    } else {
+      // No aggregation requested
+      if (condition.getPrecision().equals(Precision.SECONDS)) {
+        TimelineMetric metric = TIMELINE_METRIC_READ_HELPER.getTimelineMetricFromResultSet(rs);
+        if (condition.isGrouped()) {
+          metrics.addOrMergeTimelineMetric(metric);
+        } else {
+          metrics.getMetrics().add(metric);
+        }
 
-      if (condition.isGrouped()) {
-        metrics.addOrMergeTimelineMetric(metric);
       } else {
-        metrics.getMetrics().add(metric);
+        SingleValuedTimelineMetric metric =
+          TIMELINE_METRIC_READ_HELPER.getAggregatedTimelineMetricFromResultSet(rs,
+            Function.DEFAULT_VALUE_FUNCTION);
+        if (condition.isGrouped()) {
+          metrics.addOrMergeTimelineMetric(metric);
+        } else {
+          metrics.getMetrics().add(metric.getTimelineMetric());
+        }
       }
     }
   }
 
-  private void getLatestMetricRecords(
-    Condition condition, Connection conn, TimelineMetrics metrics)
-    throws SQLException, IOException {
+  private void getLatestMetricRecords(Condition condition, Connection conn,
+                                      TimelineMetrics metrics) throws SQLException, IOException {
 
     validateConditionIsNotEmpty(condition);
 
-    PreparedStatement stmt;
-
-    stmt = PhoenixTransactSQL.prepareGetLatestMetricSqlStmt(conn,
-        condition);
+    PreparedStatement stmt = PhoenixTransactSQL.prepareGetLatestMetricSqlStmt(conn, condition);
     ResultSet rs = null;
     try {
       rs = stmt.executeQuery();
@@ -1146,7 +1117,7 @@ public class PhoenixHBaseAccessor {
         stmt.setString(1, metadata.getMetricName());
         stmt.setString(2, metadata.getAppId());
         stmt.setString(3, metadata.getUnits());
-        stmt.setString(4, metadata.getType().name());
+        stmt.setString(4, metadata.getType());
         stmt.setLong(5, metadata.getSeriesStartTime());
         stmt.setBoolean(6, metadata.isSupportsAggregates());
 
@@ -1239,7 +1210,7 @@ public class PhoenixHBaseAccessor {
           metricName,
           appId,
           rs.getString("UNITS"),
-          Enums.getIfPresent(MetricType.class, rs.getString("TYPE")).or(MetricType.UNDEFINED),
+          rs.getString("TYPE"),
           rs.getLong("START_TIME"),
           rs.getBoolean("SUPPORTS_AGGREGATION")
         );

http://git-wip-us.apache.org/repos/asf/ambari/blob/58b91c84/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/Function.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/Function.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/Function.java
index 8292657..6f408a5 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/Function.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/Function.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators;
 
+import java.util.Arrays;
+
 /**
  * Is used to determine metrics aggregate table.
  *
@@ -24,8 +26,7 @@ package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline
  * @see org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.TimelineWebServices#getTimelineMetrics
  */
 public class Function {
-  public static Function DEFAULT_VALUE_FUNCTION =
-    new Function(ReadFunction.VALUE, null);
+  public static Function DEFAULT_VALUE_FUNCTION = new Function(ReadFunction.VALUE, null);
   private static final String SUFFIX_SEPARATOR = "\\._";
 
   private ReadFunction readFunction = ReadFunction.VALUE;
@@ -42,7 +43,13 @@ public class Function {
     this.postProcessingFunction = ppFunction;
   }
 
-  public static Function fromMetricName(String metricName){
+  /**
+   * Segregate post processing function eg: rate from aggregate function,
+   * example: avg, in any order
+   * @param metricName metric name from request
+   * @return @Function
+   */
+  public static Function fromMetricName(String metricName) {
     // gets postprocessing, and aggregation function
     // ex. Metric._rate._avg
     String[] parts = metricName.split(SUFFIX_SEPARATOR);
@@ -50,14 +57,31 @@ public class Function {
     ReadFunction readFunction = ReadFunction.VALUE;
     PostProcessingFunction ppFunction = null;
 
-      if (parts.length == 3) {
-        ppFunction = PostProcessingFunction.getFunction(parts[1]);
-        readFunction = ReadFunction.getFunction(parts[2]);
-      } else if (parts.length == 2) {
-        ppFunction = null;
-        readFunction = ReadFunction.getFunction(parts[1]);
+    if (parts.length <= 1) {
+      return new Function(readFunction, null);
+    }
+    if (parts.length > 3) {
+      throw new IllegalArgumentException("Invalid number of functions specified.");
+    }
+
+    // Parse functions
+    boolean isSuccessful = false; // Best effort
+    for (String part : parts) {
+      if (ReadFunction.isPresent(part)) {
+        readFunction = ReadFunction.getFunction(part);
+        isSuccessful = true;
+      }
+      if (PostProcessingFunction.isPresent(part)) {
+        ppFunction = PostProcessingFunction.getFunction(part);
+        isSuccessful = true;
       }
+    }
 
+    // Throw exception if parsing failed
+    if (!isSuccessful) {
+      throw new FunctionFormatException("Could not parse provided functions: " +
+        "" + Arrays.asList(parts));
+    }
 
     return new Function(readFunction, ppFunction);
   }
@@ -113,8 +137,16 @@ public class Function {
       return suffix;
     }
 
-    public static PostProcessingFunction getFunction(String functionName) throws
-      FunctionFormatException {
+    public static boolean isPresent(String functionName) {
+      try {
+        PostProcessingFunction.valueOf(functionName.toUpperCase());
+      } catch (IllegalArgumentException e) {
+        return false;
+      }
+      return true;
+    }
+
+    public static PostProcessingFunction getFunction(String functionName) throws FunctionFormatException {
       if (functionName == null) {
         return NONE;
       }
@@ -122,8 +154,7 @@ public class Function {
       try {
         return PostProcessingFunction.valueOf(functionName.toUpperCase());
       } catch (IllegalArgumentException e) {
-        throw new FunctionFormatException("Function should be value, avg, min, " +
-          "max", e);
+        throw new FunctionFormatException("Function should be ._rate", e);
       }
     }
   }
@@ -145,8 +176,16 @@ public class Function {
       return suffix;
     }
 
-    public static ReadFunction getFunction(String functionName) throws
-      FunctionFormatException {
+    public static boolean isPresent(String functionName) {
+      try {
+        ReadFunction.valueOf(functionName.toUpperCase());
+      } catch (IllegalArgumentException e) {
+        return false;
+      }
+      return true;
+    }
+
+    public static ReadFunction getFunction(String functionName) throws FunctionFormatException {
       if (functionName == null) {
         return VALUE;
       }
@@ -154,12 +193,16 @@ public class Function {
         return ReadFunction.valueOf(functionName.toUpperCase());
       } catch (IllegalArgumentException e) {
         throw new FunctionFormatException(
-          "Function should be value, avg, min, max. Got " + functionName, e);
+          "Function should be sum, avg, min, max. Got " + functionName, e);
       }
     }
   }
 
   public static class FunctionFormatException extends IllegalArgumentException {
+    public FunctionFormatException(String message) {
+      super(message);
+    }
+
     public FunctionFormatException(String message, Throwable cause) {
       super(message, cause);
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/58b91c84/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricReadHelper.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricReadHelper.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricReadHelper.java
index c5e60fe..846ae92 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricReadHelper.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricReadHelper.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators;
 
 
+import org.apache.hadoop.metrics2.sink.timeline.SingleValuedTimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
 
@@ -46,6 +47,43 @@ public class TimelineMetricReadHelper {
     return metric;
   }
 
+  public SingleValuedTimelineMetric getAggregatedTimelineMetricFromResultSet(ResultSet rs,
+      Function f) throws SQLException, IOException {
+
+    SingleValuedTimelineMetric metric = new SingleValuedTimelineMetric(
+      rs.getString("METRIC_NAME") + f.getSuffix(),
+      rs.getString("APP_ID"),
+      rs.getString("INSTANCE_ID"),
+      rs.getString("HOSTNAME"),
+      rs.getLong("SERVER_TIME"),
+      rs.getLong("SERVER_TIME"),
+      rs.getString("UNITS")
+    );
+
+    double value;
+    switch(f.getReadFunction()){
+      case AVG:
+        value = rs.getDouble("METRIC_SUM") / rs.getInt("METRIC_COUNT");
+        break;
+      case MIN:
+        value = rs.getDouble("METRIC_MIN");
+        break;
+      case MAX:
+        value = rs.getDouble("METRIC_MAX");
+        break;
+      case SUM:
+        value = rs.getDouble("METRIC_SUM");
+        break;
+      default:
+        value = rs.getDouble("METRIC_SUM") / rs.getInt("METRIC_COUNT");
+        break;
+    }
+
+    metric.setSingleTimeseriesValue(rs.getLong("SERVER_TIME"), value);
+
+    return metric;
+  }
+
   /**
    * Returns common part of timeline metrics record without the values.
    */

http://git-wip-us.apache.org/repos/asf/ambari/blob/58b91c84/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
index 1c1a1dc..8e58203 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
@@ -36,7 +36,6 @@ import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import static org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata.MetricType.UNDEFINED;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.DISABLE_METRIC_METADATA_MGMT;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.METRICS_METADATA_SYNC_INIT_DELAY;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.METRICS_METADATA_SYNC_SCHEDULE_DELAY;
@@ -166,8 +165,8 @@ public class TimelineMetricMetadataManager {
     return new TimelineMetricMetadata(
       timelineMetric.getMetricName(),
       timelineMetric.getAppId(),
-      timelineMetric.getType(), // Present type and unit are synonyms
-      UNDEFINED, // TODO: Add support for types in the application
+      timelineMetric.getUnits(),
+      timelineMetric.getType(),
       timelineMetric.getStartTime(),
       true
     );

http://git-wip-us.apache.org/repos/asf/ambari/blob/58b91c84/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/FunctionTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/FunctionTest.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/FunctionTest.java
index af9c6bb..46bc6f8 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/FunctionTest.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/FunctionTest.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
 
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.Function;
+import org.junit.Ignore;
 import org.junit.Test;
 
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.Function.fromMetricName;
@@ -32,17 +33,20 @@ public class FunctionTest {
     Function f = fromMetricName("Metric._avg");
     assertThat(f).isEqualTo(new Function(AVG, null));
 
-
     f = fromMetricName("Metric._rate._avg");
     assertThat(f).isEqualTo(new Function(AVG, RATE));
 
     f = fromMetricName("bytes_in");
     assertThat(f).isEqualTo(Function.DEFAULT_VALUE_FUNCTION);
-  }
 
+    // Rate support without aggregates
+    f = fromMetricName("Metric._rate");
+    assertThat(f).isEqualTo(new Function(null, RATE));
+  }
 
+  @Ignore // If unknown function: behavior is best effort query without function
   @Test(expected = Function.FunctionFormatException.class)
   public void testNotAFunction() throws Exception {
-    Function f = fromMetricName("bytes._not._afunction");
+    fromMetricName("bytes._not._afunction");
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/58b91c84/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStoreTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStoreTest.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStoreTest.java
index 8233b3f..512a7db 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStoreTest.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStoreTest.java
@@ -17,12 +17,15 @@
  */
 package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
 
+import junit.framework.Assert;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.Function;
 import org.junit.Test;
 
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
 
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.Function.ReadFunction.AVG;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.Function.PostProcessingFunction.RATE;
@@ -32,8 +35,7 @@ public class HBaseTimelineMetricStoreTest {
 
   public static final String MEM_METRIC = "mem";
   public static final String BYTES_IN_METRIC = "bytes_in";
-  public static final String BYTES_NOT_AFUNCTION_METRIC = "bytes._not" +
-    "._afunction";
+  public static final String BYTES_NOT_AFUNCTION_METRIC = "bytes._not._afunction";
 
   @Test
   public void testParseMetricNamesToAggregationFunctions() throws Exception {
@@ -45,8 +47,8 @@ public class HBaseTimelineMetricStoreTest {
       BYTES_NOT_AFUNCTION_METRIC);
 
     //when
-    HashMap<String, List<Function>> mfm = HBaseTimelineMetricStore
-      .parseMetricNamesToAggregationFunctions(metricNames);
+    HashMap<String, List<Function>> mfm =
+      HBaseTimelineMetricStore.parseMetricNamesToAggregationFunctions(metricNames);
 
     //then
     assertThat(mfm).hasSize(3)
@@ -63,4 +65,25 @@ public class HBaseTimelineMetricStoreTest {
       .contains(Function.DEFAULT_VALUE_FUNCTION);
 
   }
+
+  @Test
+  public void testRateCalculationOnMetricsWithEqualValues() throws Exception {
+    Map<Long, Double> metricValues = new TreeMap<>();
+    metricValues.put(1454016368371L, 1011.25);
+    metricValues.put(1454016428371L, 1011.25);
+    metricValues.put(1454016488371L, 1011.25);
+    metricValues.put(1454016548371L, 1011.25);
+    metricValues.put(1454016608371L, 1011.25);
+    metricValues.put(1454016668371L, 1011.25);
+    metricValues.put(1454016728371L, 1011.25);
+
+    // Calculate rate
+    Map<Long, Double> rates = HBaseTimelineMetricStore.updateValuesAsRate(new TreeMap<>(metricValues));
+
+    // Make sure rate is zero
+    for (Map.Entry<Long, Double> rateEntry : rates.entrySet()) {
+      Assert.assertEquals("Rate should be zero, key = " + rateEntry.getKey()
+          + ", value = " + rateEntry.getValue(), 0.0, rateEntry.getValue());
+    }
+  }
 }


[08/16] ambari git commit: AMBARI-14525. Longer cluster names extend beyond the frame in the Ambari Admin view. (alexantonenko)

Posted by nc...@apache.org.
AMBARI-14525. Longer cluster names extend beyond the frame in the Ambari Admin view. (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2be0ec36
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2be0ec36
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2be0ec36

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 2be0ec36231e7fd07216066d53e1b973b4dc8b78
Parents: f6a75a6
Author: Alex Antonenko <hi...@gmail.com>
Authored: Wed Feb 3 14:52:59 2016 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Wed Feb 3 14:53:19 2016 +0200

----------------------------------------------------------------------
 .../app/scripts/controllers/NavbarCtrl.js         |  9 +++++++++
 .../resources/ui/admin-web/app/styles/main.css    | 18 +++++++++++++++++-
 .../ui/admin-web/app/views/leftNavbar.html        |  4 ++--
 3 files changed, 28 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2be0ec36/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/NavbarCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/NavbarCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/NavbarCtrl.js
index 319a8f2..f1eb5c5 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/NavbarCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/NavbarCtrl.js
@@ -56,6 +56,15 @@ angular.module('ambariAdminConsole')
     $scope.editCluster.editingName  = !$scope.editCluster.editingName;
   };
 
+  $scope.clusterDisplayName = function () {
+    var name="";
+    if($scope.cluster && $scope.cluster.Clusters)
+    {
+       name = $scope.cluster.Clusters.cluster_name;
+    }
+    return name.length > 13 ? name.substr(0, 13) + "..." : name;
+  };
+
   $scope.confirmClusterNameChange = function() {
     ConfirmationModal.show(
       $t('common.clusterNameChangeConfirmation.title'),

http://git-wip-us.apache.org/repos/asf/ambari/blob/2be0ec36/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
index 0474e9c..7a91296 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
@@ -1052,6 +1052,22 @@ button.btn.btn-xs{
   line-height: 1.5;
   border-radius: 3px;
 }
+
+.clusterDisplayName {
+  display:inline-block;
+  width:90%;
+  word-wrap:break-word;
+}
+
+.renameCluster {
+  display:inline-block;
+  width:10%;
+}
+
+.default-body {
+  word-wrap:break-word;
+}
+
 .alert-info {
   background-color: #E6F1F6;
   border-color: #D2D9DD;
@@ -1095,7 +1111,7 @@ button.btn.btn-xs{
   box-sizing: border-box;
 }
 .ambariAlert .content {
-  display: inline-block;
+  word-wrap: break-word;
   padding-right: 10px;
 }
 .ambariAlert .icon-box {

http://git-wip-us.apache.org/repos/asf/ambari/blob/2be0ec36/ambari-admin/src/main/resources/ui/admin-web/app/views/leftNavbar.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/leftNavbar.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/leftNavbar.html
index 17f8f0b..3f01e38 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/leftNavbar.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/leftNavbar.html
@@ -21,10 +21,10 @@
     <div class="panel-body">
       <div class="cluster-section" ng-show="cluster">
         <div id="cluster-name"  ng-switch on="editCluster.editingName">
-          <h5 ng-switch-when="false">{{cluster.Clusters.cluster_name}}
+          <h5 ng-switch-when="false"><div title={{cluster.Clusters.cluster_name}} class="clusterDisplayName">{{clusterDisplayName()}}</div>
             <i ng-show="cluster.Clusters.provisioning_state == 'INSTALLED'" 
                ng-click="toggleEditName()" 
-               class="glyphicon glyphicon-edit pull-right edit-cluster-name" tooltip="{{'common.renameCluster' | translate}}">
+               class="glyphicon glyphicon-edit pull-right edit-cluster-name renameCluster" tooltip="{{'common.renameCluster' | translate}}">
             </i>
           </h5>
 


[11/16] ambari git commit: AMBARI-14899. UI part of rename Admin Setting API (alexantonenko)

Posted by nc...@apache.org.
AMBARI-14899. UI part of rename Admin Setting API (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/079a5b3b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/079a5b3b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/079a5b3b

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 079a5b3b129eb217cbded5b0cc4859a0a62db86e
Parents: 1d9f1bb
Author: Alex Antonenko <hi...@gmail.com>
Authored: Wed Feb 3 17:17:33 2016 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Wed Feb 3 18:03:39 2016 +0200

----------------------------------------------------------------------
 .../controllers/loginActivities/LoginMessageMainCtrl.js   | 10 +++++-----
 ambari-web/app/assets/data/settings/motd.json             |  4 ++--
 ambari-web/app/router.js                                  |  2 +-
 ambari-web/app/utils/ajax/ajax.js                         |  2 +-
 4 files changed, 9 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/079a5b3b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/loginActivities/LoginMessageMainCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/loginActivities/LoginMessageMainCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/loginActivities/LoginMessageMainCtrl.js
index 88c4d27..af78dde 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/loginActivities/LoginMessageMainCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/loginActivities/LoginMessageMainCtrl.js
@@ -25,11 +25,11 @@ angular.module('ambariAdminConsole')
     $scope.status = false;
     $scope.motdExists = false;
     $scope.text = "";
-    $scope.buttonText = "Ok";
+    $scope.buttonText = "OK";
     $scope.submitDisabled = true;
 
-    $http.get('/api/v1/admin-settings/motd').then(function (res) {
-      var response = JSON.parse(res.data.AdminSettings.content);
+    $http.get('/api/v1/settings/motd').then(function (res) {
+      var response = JSON.parse(res.data.Settings.content);
       $scope.text = response.text ? response.text : "";
       $scope.buttonText = response.button ? response.button : "";
       $scope.status = response.status && response.status == "true" ? true : false;
@@ -53,7 +53,7 @@ angular.module('ambariAdminConsole')
     $scope.saveLoginMsg = function(targetUrl) {
       var method = $scope.motdExists ? 'PUT' : 'POST';
       var data = {
-        'AdminSettings' : {
+        'Settings' : {
           'content' : '{"text":"' + $scope.text + '", "button":"' + $scope.buttonText + '", "status":"' + $scope.status + '"}',
           'name' : 'motd',
           'setting_type' : 'ambari-server'
@@ -64,7 +64,7 @@ angular.module('ambariAdminConsole')
         $scope.submitDisabled = true;
         return $http({
           method: method,
-          url: '/api/v1/admin-settings/' + ($scope.motdExists ? 'motd' : ''),
+          url: '/api/v1/settings/' + ($scope.motdExists ? 'motd' : ''),
           data: data
         }).then(function successCallback() {
           $scope.motdExists = true;

http://git-wip-us.apache.org/repos/asf/ambari/blob/079a5b3b/ambari-web/app/assets/data/settings/motd.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/settings/motd.json b/ambari-web/app/assets/data/settings/motd.json
index c051a90..64eb411 100644
--- a/ambari-web/app/assets/data/settings/motd.json
+++ b/ambari-web/app/assets/data/settings/motd.json
@@ -1,6 +1,6 @@
 {
-  "href" : "/api/v1/admin-settings/motd",
-  "AdminSettings" : {
+  "href" : "/api/v1/settings/motd",
+  "Settings" : {
     "content" : "{\"text\":\"You are using test mode\", \"button\":\"\", \"status\":\"true\"}",
     "name" : "motd",
     "setting_type" : "ambari-server",

http://git-wip-us.apache.org/repos/asf/ambari/blob/079a5b3b/ambari-web/app/router.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/router.js b/ambari-web/app/router.js
index 08df9c5..c7cba27 100644
--- a/ambari-web/app/router.js
+++ b/ambari-web/app/router.js
@@ -363,7 +363,7 @@ App.Router = Em.Router.extend({
    * @param {object} data
    */
   showLoginMessage: function (data){
-    var response = JSON.parse(data.AdminSettings.content),
+    var response = JSON.parse(data.Settings.content),
       text = response.text ? response.text : "",
       buttonText = response.button ? response.button : Em.I18n.t('ok'),
       status = response.status && response.status == "true" ? true : false;

http://git-wip-us.apache.org/repos/asf/ambari/blob/079a5b3b/ambari-web/app/utils/ajax/ajax.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/ajax/ajax.js b/ambari-web/app/utils/ajax/ajax.js
index 9cd54d2..5ec03d4 100644
--- a/ambari-web/app/utils/ajax/ajax.js
+++ b/ambari-web/app/utils/ajax/ajax.js
@@ -2089,7 +2089,7 @@ var urls = {
     'mock': '/data/clusters/info.json'
   },
   'router.login.message': {
-    'real': '/admin-settings/motd',
+    'real': '/settings/motd',
     'mock': '/data/settings/motd.json'
   },
   'router.logoff': {


[09/16] ambari git commit: AMBARI-14897 Rolling Upgrade: upgrade group status not updated. (atkach)

Posted by nc...@apache.org.
AMBARI-14897 Rolling Upgrade: upgrade group status not updated. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/18463c86
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/18463c86
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/18463c86

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 18463c86afc348d985c8638cd4542201372998e3
Parents: 2be0ec3
Author: Andrii Tkach <at...@hortonworks.com>
Authored: Wed Feb 3 12:50:07 2016 +0200
Committer: Andrii Tkach <at...@hortonworks.com>
Committed: Wed Feb 3 16:54:02 2016 +0200

----------------------------------------------------------------------
 .../main/admin/stack_and_upgrade_controller.js     |  2 ++
 ambari-web/app/models/upgrade_entity.js            |  2 +-
 .../admin/stack_and_upgrade_controller_test.js     | 17 +++++++++++++++++
 3 files changed, 20 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/18463c86/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
index 16a58d6..c226059 100644
--- a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
+++ b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
@@ -377,10 +377,12 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
       //update existed entities with new data
       oldData.upgradeGroups.forEach(function (oldGroup) {
         oldGroup.set('status', groupsMap[oldGroup.get('group_id')].status);
+        oldGroup.set('display_status', groupsMap[oldGroup.get('group_id')].display_status);
         oldGroup.set('progress_percent', groupsMap[oldGroup.get('group_id')].progress_percent);
         oldGroup.set('completed_task_count', groupsMap[oldGroup.get('group_id')].completed_task_count);
         oldGroup.upgradeItems.forEach(function (item) {
           item.set('status', itemsMap[item.get('stage_id')].status);
+          item.set('display_status', itemsMap[item.get('stage_id')].display_status);
           item.set('progress_percent', itemsMap[item.get('stage_id')].progress_percent);
         });
         var hasExpandableItems = oldGroup.upgradeItems.some(function (item) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/18463c86/ambari-web/app/models/upgrade_entity.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/upgrade_entity.js b/ambari-web/app/models/upgrade_entity.js
index 70d1840..7084185 100644
--- a/ambari-web/app/models/upgrade_entity.js
+++ b/ambari-web/app/models/upgrade_entity.js
@@ -87,5 +87,5 @@ App.upgradeEntity = Em.Object.extend({
     if (this.get('type') === 'GROUP') {
       return !this.get('isActive') && this.get('hasExpandableItems') ? 'SUBITEM_FAILED' : this.get('display_status') || this.get('status');
     }
-  }.property('isExpandableGroup')
+  }.property('isExpandableGroup', 'display_status', 'status')
 });
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/18463c86/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js b/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
index 508e2dd..1315477 100644
--- a/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
+++ b/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
@@ -635,6 +635,7 @@ describe('App.MainAdminStackAndUpgradeController', function() {
               UpgradeGroup: {
                 group_id: 1,
                 status: 'COMPLETED',
+                display_status: 'COMPLETED',
                 progress_percent: 100,
                 completed_task_count: 3
               },
@@ -643,6 +644,7 @@ describe('App.MainAdminStackAndUpgradeController', function() {
                   UpgradeItem: {
                     stage_id: 1,
                     status: 'COMPLETED',
+                    display_status: 'COMPLETED',
                     progress_percent: 100
                   }
                 }
@@ -652,6 +654,7 @@ describe('App.MainAdminStackAndUpgradeController', function() {
               UpgradeGroup: {
                 group_id: 2,
                 status: 'ABORTED',
+                display_status: 'ABORTED',
                 progress_percent: 50,
                 completed_task_count: 1
               },
@@ -660,6 +663,7 @@ describe('App.MainAdminStackAndUpgradeController', function() {
                   UpgradeItem: {
                     stage_id: 2,
                     status: 'ABORTED',
+                    display_status: 'ABORTED',
                     progress_percent: 99
                   }
                 },
@@ -667,6 +671,7 @@ describe('App.MainAdminStackAndUpgradeController', function() {
                   UpgradeItem: {
                     stage_id: 3,
                     status: 'PENDING',
+                    display_status: 'PENDING',
                     progress_percent: 0
                   }
                 }
@@ -683,6 +688,9 @@ describe('App.MainAdminStackAndUpgradeController', function() {
         it('status is COMPLETED', function () {
           expect(groups[0].get('status')).to.equal('COMPLETED');
         });
+        it('display_status is COMPLETED', function () {
+          expect(groups[0].get('display_status')).to.equal('COMPLETED');
+        });
         it('progress_percent is 100', function () {
           expect(groups[0].get('progress_percent')).to.equal(100);
         });
@@ -692,6 +700,9 @@ describe('App.MainAdminStackAndUpgradeController', function() {
         it('upgradeItems.0.status is COMPLETED', function () {
           expect(groups[0].get('upgradeItems')[0].get('status')).to.equal('COMPLETED');
         });
+        it('upgradeItems.0.display_status is COMPLETED', function () {
+          expect(groups[0].get('upgradeItems')[0].get('display_status')).to.equal('COMPLETED');
+        });
         it('upgradeItems.0.progress_percent is 100', function () {
           expect(groups[0].get('upgradeItems')[0].get('progress_percent')).to.equal(100);
         });
@@ -704,6 +715,9 @@ describe('App.MainAdminStackAndUpgradeController', function() {
         it('status is ABORTED', function () {
           expect(groups[1].get('status')).to.equal('ABORTED');
         });
+        it('display_status is ABORTED', function () {
+          expect(groups[1].get('display_status')).to.equal('ABORTED');
+        });
         it('progress_percent is 50', function () {
           expect(groups[1].get('progress_percent')).to.equal(50);
         });
@@ -713,6 +727,9 @@ describe('App.MainAdminStackAndUpgradeController', function() {
         it('upgradeItems.[].status = ["ABORTED", "PENDING"]', function () {
           expect(groups[1].get('upgradeItems').mapProperty('status')).to.eql(['ABORTED', 'PENDING']);
         });
+        it('upgradeItems.[].display_status = ["ABORTED", "PENDING"]', function () {
+          expect(groups[1].get('upgradeItems').mapProperty('display_status')).to.eql(['ABORTED', 'PENDING']);
+        });
         it('upgradeItems.[].progress_percent = [99, 0]', function () {
           expect(groups[1].get('upgradeItems').mapProperty('progress_percent')).to.eql([99, 0]);
         });


[03/16] ambari git commit: AMBARI-14879 Alerts page pagination issue. (ababiichuk)

Posted by nc...@apache.org.
AMBARI-14879 Alerts page pagination issue. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/94c13eee
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/94c13eee
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/94c13eee

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 94c13eee7f1536d343586ad3e5519961be57a356
Parents: aab6d88
Author: ababiichuk <ab...@hortonworks.com>
Authored: Tue Feb 2 19:57:26 2016 +0200
Committer: ababiichuk <ab...@hortonworks.com>
Committed: Tue Feb 2 20:55:20 2016 +0200

----------------------------------------------------------------------
 ambari-web/app/views/common/table_view.js | 16 +++++++++++-----
 1 file changed, 11 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/94c13eee/ambari-web/app/views/common/table_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/table_view.js b/ambari-web/app/views/common/table_view.js
index 386a2aa..f6ccf6c 100644
--- a/ambari-web/app/views/common/table_view.js
+++ b/ambari-web/app/views/common/table_view.js
@@ -75,12 +75,18 @@ App.TableView = Em.View.extend(App.UserPref, {
   willInsertElement: function () {
     var self = this;
     var name = this.get('controller.name');
-    if (!this.get('displayLength') && this.get('state') !== "inBuffer") {
-      if (App.db.getDisplayLength(name)) {
-        self.set('displayLength', App.db.getDisplayLength(name));
-        Em.run.next(function () {
+    if (!this.get('displayLength')) {
+      var displayLength = App.db.getDisplayLength(name);
+      if (displayLength) {
+        if (this.get('state') !== "inBuffer") {
+          self.set('displayLength', displayLength);
           self.initFilters();
-        });
+        } else {
+          Em.run.next(function () {
+            self.set('displayLength', displayLength);
+            self.initFilters();
+          });
+        }
       } else {
         if (!$.mocho) {
           this.getUserPref(this.displayLengthKey()).complete(function () {


[12/16] ambari git commit: AMBARI-14900. Ambari Admin: Create LDAP Setup page (alexantonenko)

Posted by nc...@apache.org.
AMBARI-14900. Ambari Admin: Create LDAP Setup page (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/46f6030b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/46f6030b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/46f6030b

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 46f6030b06dc9a1e1adfa18cd5df4d9ad7456ebc
Parents: 079a5b3
Author: Alex Antonenko <hi...@gmail.com>
Authored: Wed Feb 3 17:24:11 2016 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Wed Feb 3 18:03:40 2016 +0200

----------------------------------------------------------------------
 .../authentication/AuthenticationMainCtrl.js    | 147 ++++++++++-
 .../ui/admin-web/app/scripts/i18n.config.js     |  76 ++++++
 .../resources/ui/admin-web/app/styles/main.css  |  16 +-
 .../app/views/authentication/main.html          | 250 ++++++++++++++++++-
 .../ui/admin-web/app/views/users/create.html    |   2 +-
 5 files changed, 483 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/46f6030b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/authentication/AuthenticationMainCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/authentication/AuthenticationMainCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/authentication/AuthenticationMainCtrl.js
index a746aa3..c7b7026 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/authentication/AuthenticationMainCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/authentication/AuthenticationMainCtrl.js
@@ -18,5 +18,150 @@
 'use strict';
 
 angular.module('ambariAdminConsole')
-  .controller('AuthenticationMainCtrl',['$scope', function($scope) {
+  .controller('AuthenticationMainCtrl', ['$scope', '$translate', 'Alert', function ($scope, $translate, $Alert) {
+    $scope.t = $translate.instant;
+
+    $scope.isLDAPEnabled = false;
+    $scope.connectivity = {
+      trustStore: 'default',
+      trustStoreOptions: ['default', 'custom'],
+      trustStoreType: 'jks',
+      trustStoreTypeOptions: ['jks', 'jceks', 'pkcs12']
+    };
+    $scope.attributes = {
+      detection: 'auto'
+    };
+
+    $scope.isConnectivityFormInvalid = true;
+    $scope.isAutoDetectFormInvalid = true;
+    $scope.isAttributesFormInvalid = true;
+    $scope.isTestAttributesFormInvalid = false;
+
+    $scope.isRequestRunning = false;
+
+    $scope.isConnectionTestRunning = false;
+    $scope.isConnectionTestComplete = false;
+    $scope.hasConnectionTestPassed = false;
+
+    $scope.isAttributeDetectionRunning = false;
+    $scope.isAttributeDetectionComplete = false;
+    $scope.isAttributeDetectionSuccessful = false;
+
+    $scope.isTestAttributesRunning = false;
+    $scope.isTestAttributesComplete = false;
+    $scope.isTestAttributesSuccessful = false;
+
+    $scope.isSaving = false;
+    $scope.isSavingComplete = false;
+    $scope.isSavingSuccessful = false;
+
+    $scope.isTestAttributesFormShown = false;
+
+    $scope.toggleAuthentication = function () {
+      $scope.isConnectionTestRunning = false;
+      $scope.isConnectionTestComplete = false;
+      $scope.hasConnectionTestPassed = false;
+    };
+
+    $scope.testConnection = function () {
+      $scope.isConnectionTestRunning = true;
+      $scope.isConnectionTestComplete = false;
+      $scope.isAttributeDetectionRunning = false;
+      $scope.isAttributeDetectionComplete = false;
+      $scope.isAttributeDetectionSuccessful = false;
+
+      // TODO replace mock with test connection request when API is available
+      setTimeout(function (prevValue) {
+        $scope.isConnectionTestRunning = false;
+        $scope.isConnectionTestComplete = true;
+        $scope.hasConnectionTestPassed = !prevValue;
+      }, 1000, $scope.hasConnectionTestPassed);
+      $scope.hasConnectionTestPassed = false;
+    };
+
+    $scope.detectAttributes = function () {
+      $scope.isAttributeDetectionRunning = true;
+      $scope.isAttributeDetectionComplete = false;
+
+      // TODO replace mock with attributes detection request when API is available
+      setTimeout(function (prevValue) {
+        $scope.isAttributeDetectionRunning = false;
+        $scope.isAttributeDetectionComplete = true;
+        $scope.isAttributeDetectionSuccessful = !prevValue;
+        if ($scope.isAttributeDetectionSuccessful) {
+          var form = $scope.attributes;
+          form.userObjClass = 'person';
+          form.userNameAttr = 'sAMAccountName';
+          form.groupObjClass = 'group';
+          form.groupNameAttr = 'cn';
+          form.groupMemberAttr = 'member';
+          form.distinguishedNameAttr = 'distinguishedName';
+        }
+      }, 1000, $scope.isAttributeDetectionSuccessful);
+
+      $scope.isAttributeDetectionSuccessful = false;
+    };
+
+    $scope.showTestAttributesForm = function () {
+      $scope.isTestAttributesFormShown = true;
+    };
+
+    $scope.testAttributes = function () {
+      $scope.isTestAttributesRunning = true;
+      $scope.isTestAttributesComplete = false;
+
+      // TODO replace mock with test attributes request when API is available
+      setTimeout(function (prevValue) {
+        $scope.isTestAttributesRunning = false;
+        $scope.isTestAttributesComplete = true;
+        $scope.isTestAttributesSuccessful = !prevValue;
+        if ($scope.isTestAttributesSuccessful) {
+          $scope.attributes.availableGroups = ['HadoopOps', 'HadoopOpsDFW', 'AmbariAdmins', 'ExchangeAdmins', 'AmbariUsers', 'ExchangeUsers'];
+        }
+      }, 1000, $scope.isTestAttributesSuccessful);
+      $scope.isTestAttributesSuccessful = false;
+    };
+
+    $scope.save = function () {
+      $scope.isSaving = true;
+      $scope.isSavingComplete = false;
+      // TODO replace mock with save request when API is available
+      setTimeout(function (prevValue) {
+        $scope.isSaving = false;
+        $scope.isSavingComplete = true;
+        $scope.isSavingSuccessful = !prevValue;
+        if ($scope.isSavingSuccessful) {
+          $Alert.success('Settings saved');
+        } else {
+          $Alert.error('Saving failed', '500 Error');
+        }
+      }, 1000, $scope.isSavingSuccessful);
+      $scope.isSavingSuccessful = false;
+    };
+
+    $scope.$watch('connectivity', function (form, oldForm, scope) {
+      scope.isConnectivityFormInvalid = !(form.host && form.port
+        && (form.trustStore === 'default' || form.trustStorePath && form.trustStorePassword)
+        && form.dn && form.bindPassword);
+    }, true);
+
+    $scope.$watch('attributes', function (form, oldForm, scope) {
+      scope.isAutoDetectFormInvalid = !(form.userSearch && form.groupSearch);
+      scope.isAttributesFormInvalid = !(form.userObjClass && form.userNameAttr && form.groupObjClass
+        && form.groupNameAttr && form.groupMemberAttr && form.distinguishedNameAttr
+        && (form.detection === 'auto' || form.userSearchManual && form.groupSearchManual));
+      scope.isTestAttributesFormInvalid = !(form.username && form.password);
+    }, true);
+
+    $scope.$watch('attributes.detection', function (newValue, oldValue, scope) {
+      scope.isTestAttributesFormShown = false;
+      scope.isAttributeDetectionComplete = false;
+      scope.isAttributeDetectionSuccessful = false;
+    });
+
+    $scope.$watch(function (scope) {
+      return scope.isConnectionTestRunning || scope.isAttributeDetectionRunning || scope.isTestAttributesRunning || scope.isSaving;
+    }, function (newValue, oldValue, scope) {
+      scope.isRequestRunning = newValue;
+    });
 }]);

http://git-wip-us.apache.org/repos/asf/ambari/blob/46f6030b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
index c1b9d88..086bc13 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
@@ -320,6 +320,82 @@ angular.module('ambariAdminConsole')
         'versionUpdateError': 'Version update error',
         'versionDeleteError': 'Version delete error'
       }
+    },
+
+    'authentication': {
+      'description': 'Ambari supports authenticating against local Ambari users created and stored in the Ambari Database, or authenticating against a LDAP server:',
+      'ldap': 'LDAP Authentication',
+      'on': 'On',
+      'off': 'Off',
+
+      'connectivity': {
+        'title': 'LDAP Connectivity Configuration',
+        'host': 'LDAP Server Host',
+        'port': 'LDAP Server Port',
+        'ssl': 'Use SSL?',
+        'trustStore': {
+          'label': 'Trust Store',
+          'options': {
+            'default': 'JDK Default',
+            'custom': 'Custom'
+          }
+        },
+        'trustStorePath': 'Trust Store Path',
+        'trustStoreType': {
+          'label': 'Trust Store Type',
+          'options': {
+            'jks': 'JKS',
+            'jceks': 'JCEKS',
+            'pkcs12': 'PKCS12'
+          }
+        },
+        'trustStorePassword': 'Trust Store Password',
+        'dn': 'Bind DN',
+        'bindPassword': 'Bind Password',
+
+        'controls': {
+          'testConnection': 'Test Connection'
+        }
+      },
+
+      'attributes': {
+        'title': 'LDAP Attribute Configuration',
+        'detection': {
+          'label': 'Identifying the proper attributes to be used when authenticating and looking up users and groups can be specified manually, or automatically detected. Please choose:',
+          'options': {
+            'manual': 'Define Attributes Manually',
+            'auto': 'Auto-Detect Attributes'
+          }
+        },
+        'userSearch': 'User Search Base',
+        'groupSearch': 'Group Search Base',
+        'detected': 'The following attributes were detected, please review and Test Attributes to ensure their accuracy.',
+        'userObjClass': 'User Object Class',
+        'userNameAttr': 'User Name Attribute',
+        'groupObjClass': 'Group Object Class',
+        'groupNameAttr': 'Group Name Attribute',
+        'groupMemberAttr': 'Group Member Attribute',
+        'distinguishedNameAttr': 'Distinguished Name Attribute',
+        'test': {
+          'description': 'To quickly test the chosen attributes click the button below. During this process you can specify a test user name and password and Ambari will attempt to authenticate and retrieve group membership information',
+          'username': 'Test Username',
+          'password': 'Test Password'
+        },
+        'groupsList': 'List of Groups',
+
+        'controls': {
+          'autoDetect': 'Perform Auto-Detection',
+          'testAttrs': 'Test Attributes'
+        },
+
+        'alerts': {
+          'successfulAuth': 'Successful Authentication'
+        }
+      },
+
+      'controls': {
+        'test': 'Test'
+      }
     }
   });
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/46f6030b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
index 7a91296..cc57fa3 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
@@ -394,7 +394,7 @@ a.gotoinstance{
   font-size: 16px;
 }
 
-.user-edit-panel .ats-switch span.switch-right , .create-user-form .ats-switch span.switch-right{
+.user-edit-panel .ats-switch span.switch-right , .create-user-form .ats-switch span.switch-right, .enable-ldap .ats-switch span.switch-right {
   background-color: #da4f49;
   color: white;
 }
@@ -1147,14 +1147,14 @@ button.btn.btn-xs{
 .ambariAlert.error {
   border-left: 3px solid #ef2427;
 }
-.ambariAlert.error .icon-box {
+.ambariAlert.error .icon-box, .test-ldap-icon.fa-times-circle {
   color: #ef2427;
 }
 
 .ambariAlert.success {
   border-left: 3px solid #82c534;
 }
-.ambariAlert.success .icon-box {
+.ambariAlert.success .icon-box, .test-ldap-icon.fa-check-circle {
   color: #82c534;
 }
 
@@ -1373,6 +1373,14 @@ accordion .panel-group .panel{
 }
 
 thead.view-permission-header > tr > th {
-  border-top: 0px;
+  border-top: 0;
   padding-top: 40px;
 }
+
+.enable-ldap input[type="checkbox"] {
+  margin-top: 10px;
+}
+
+.test-ldap-icon.ng-hide-add-active, .test-ldap-icon.ng-hide-remove {
+  display: inline-block!important;
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/46f6030b/ambari-admin/src/main/resources/ui/admin-web/app/views/authentication/main.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/authentication/main.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/authentication/main.html
index 7743d1e..8fa1429 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/authentication/main.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/authentication/main.html
@@ -16,11 +16,257 @@
 * limitations under the License.
 -->
 
-<div class="users-pane">
+<div class="users-pane enable-ldap">
+
   <div class="clearfix">
     <ol class="breadcrumb pull-left">
       <li class="active">{{'common.authentication' | translate}}</li>
     </ol>
   </div>
   <hr>
-</div>
+
+  <div class="form-horizontal">
+    <div class="form-group col-sm-12">{{'authentication.description' | translate}}</div>
+    <div class="form-group">
+      <label class="control-label col-sm-4">{{'authentication.ldap' | translate}}</label>
+      <span class="col-sm-8">
+        <toggle-switch model="isLDAPEnabled" ng-disabled="isRequestRunning" on-label="{{'authentication.on' | translate}}" off-label="{{'authentication.off' | translate}}" class="switch-primary" data-off-color="danger" on-change="toggleAuthentication()"></toggle-switch>
+      </span>
+    </div>
+  </div>
+
+  <div ng-show="isLDAPEnabled">
+
+    <div class="clearfix">
+      <ol class="breadcrumb pull-left">
+        <li class="active">{{'authentication.connectivity.title' | translate}}</li>
+      </ol>
+    </div>
+    <hr>
+
+    <form class="form-horizontal" ng-submit="testConnection()">
+      <div class="form-group">
+        <label for="host" class="control-label col-sm-4">{{'authentication.connectivity.host' | translate}}</label>
+        <div class="col-sm-8">
+          <input type="text" class="form-control" id="host" ng-model="connectivity.host">
+        </div>
+      </div>
+      <div class="form-group">
+        <label for="port" class="control-label col-sm-4">{{'authentication.connectivity.port' | translate}}</label>
+        <div class="col-sm-8">
+          <input type="text" class="form-control" id="port" ng-model="connectivity.port">
+        </div>
+      </div>
+      <div class="form-group">
+        <label for="ssl" class="control-label col-sm-4">{{'authentication.connectivity.ssl' | translate}}</label>
+        <div class="col-sm-8">
+          <input type="checkbox" id="ssl" ng-model="connectivity.ssl">
+        </div>
+      </div>
+      <div class="form-group">
+        <label for="trust-store" class="control-label col-sm-4">{{'authentication.connectivity.trustStore.label' | translate}}</label>
+        <div class="col-sm-3">
+          <select class="form-control" id="trust-store" ng-model="connectivity.trustStore" ng-options="t('authentication.connectivity.trustStore.options.' + item) for item in connectivity.trustStoreOptions"></select>
+        </div>
+      </div>
+      <div ng-show="connectivity.trustStore === 'custom'">
+        <div class="form-group">
+          <label for="trust-store-path" class="control-label col-sm-4">{{'authentication.connectivity.trustStorePath' | translate}}</label>
+          <div class="col-sm-8">
+            <input type="text" class="form-control" id="trust-store-path" ng-model="connectivity.trustStorePath">
+          </div>
+        </div>
+        <div class="form-group">
+          <label for="trust-store-type" class="control-label col-sm-4">{{'authentication.connectivity.trustStoreType.label' | translate}}</label>
+          <div class="col-sm-3">
+            <select class="form-control" id="trust-store-type" ng-model="connectivity.trustStoreType" ng-options="t('authentication.connectivity.trustStoreType.options.' + item) for item in connectivity.trustStoreTypeOptions"></select>
+          </div>
+        </div>
+        <div class="form-group">
+          <label for="trust-store-password" class="control-label col-sm-4">{{'authentication.connectivity.trustStorePassword' | translate}}</label>
+          <div class="col-sm-8">
+            <input type="password" class="form-control" id="trust-store-password" ng-model="connectivity.trustStorePassword">
+          </div>
+        </div>
+      </div>
+      <div class="form-group">
+        <label for="dn" class="control-label col-sm-4">{{'authentication.connectivity.dn' | translate}}</label>
+        <div class="col-sm-8">
+          <input type="text" class="form-control" id="dn" ng-model="connectivity.dn">
+        </div>
+      </div>
+      <div class="form-group">
+        <label for="bind-password" class="control-label col-sm-4">{{'authentication.connectivity.bindPassword' | translate}}</label>
+        <div class="col-sm-8">
+          <input type="password" class="form-control" id="bind-password" ng-model="connectivity.bindPassword">
+        </div>
+      </div>
+      <div class="form-group">
+        <div class="col-sm-offset-4 col-sm-8">
+          <button type="submit" class="btn btn-primary" ng-disabled="isConnectivityFormInvalid || isRequestRunning">{{'authentication.connectivity.controls.testConnection' | translate}}</button>
+          <i class="test-ldap-icon fa ng-class: {'fa-spin fa-spinner': isConnectionTestRunning, 'fa-check-circle': hasConnectionTestPassed, 'fa-times-circle': isConnectionTestComplete && !hasConnectionTestPassed}" ng-show="isConnectionTestRunning || isConnectionTestComplete"></i>
+        </div>
+      </div>
+    </form>
+
+    <div ng-show="hasConnectionTestPassed">
+
+      <div class="clearfix">
+        <ol class="breadcrumb pull-left">
+          <li class="active">{{'authentication.attributes.title' | translate}}</li>
+        </ol>
+      </div>
+      <hr>
+
+      <form class="form-horizontal" ng-submit="detectAttributes()">
+        <div class="form-group col-sm-12">{{'authentication.attributes.detection.label' | translate}}</div>
+        <div class="form-group">
+          <label for="manual-detection" class="col-sm-12">
+            <input type="radio" id="manual-detection" name="detection" ng-model="attributes.detection" ng-disabled="isAttributeDetectionRunning" value="manual">
+            {{'authentication.attributes.detection.options.manual' | translate}}
+          </label>
+          <label for="auto-detection" class="col-sm-12">
+            <input type="radio" id="auto-detection" name="detection" ng-model="attributes.detection" ng-disabled="isAttributeDetectionRunning" value="auto">
+            {{'authentication.attributes.detection.options.auto' | translate}}
+          </label>
+        </div>
+
+        <div ng-show="attributes.detection === 'auto'">
+
+          <div class="form-group">
+            <label for="user-search" class="control-label col-sm-4">{{'authentication.attributes.userSearch' | translate}}</label>
+            <div class="col-sm-8">
+              <input type="text" class="form-control" id="user-search" ng-model="attributes.userSearch">
+            </div>
+          </div>
+          <div class="form-group">
+            <label for="group-search" class="control-label col-sm-4">{{'authentication.attributes.groupSearch' | translate}}</label>
+            <div class="col-sm-8">
+              <input type="text" class="form-control" id="group-search" ng-model="attributes.groupSearch">
+            </div>
+          </div>
+
+          <div class="form-group">
+            <div class="col-sm-offset-4 col-sm-8">
+              <button class="btn btn-primary" ng-disabled="isAutoDetectFormInvalid || isRequestRunning">{{'authentication.attributes.controls.autoDetect' | translate}}</button>
+              <i class="test-ldap-icon fa ng-class: {'fa-spin fa-spinner': isAttributeDetectionRunning, 'fa-check-circle': isAttributeDetectionSuccessful, 'fa-times-circle': isAttributeDetectionComplete && !isAttributeDetectionSuccessful}" ng-show="isAttributeDetectionRunning || isAttributeDetectionComplete"></i>
+            </div>
+          </div>
+          <div class="form-group col-sm-12" ng-show="isAttributeDetectionComplete && isAttributeDetectionSuccessful">{{'authentication.attributes.detected' | translate}}</div>
+
+        </div>
+
+      </form>
+
+      <div ng-show="attributes.detection === 'manual' || isAttributeDetectionComplete && isAttributeDetectionSuccessful">
+
+        <form id="attributes" class="form-horizontal" ng-submit="save()">
+
+          <div class="form-group">
+            <label for="user-obj-class" class="control-label col-sm-4">{{'authentication.attributes.userObjClass' | translate}}</label>
+            <div class="col-sm-8">
+              <input type="text" class="form-control" id="user-obj-class" ng-model="attributes.userObjClass">
+            </div>
+          </div>
+          <div class="form-group">
+            <label for="user-name-attr" class="control-label col-sm-4">{{'authentication.attributes.userNameAttr' | translate}}</label>
+            <div class="col-sm-8">
+              <input type="text" class="form-control" id="user-name-attr" ng-model="attributes.userNameAttr">
+            </div>
+          </div>
+          <div class="form-group">
+            <label for="group-obj-class" class="control-label col-sm-4">{{'authentication.attributes.groupObjClass' | translate}}</label>
+            <div class="col-sm-8">
+              <input type="text" class="form-control" id="group-obj-class" ng-model="attributes.groupObjClass">
+            </div>
+          </div>
+          <div class="form-group">
+            <label for="group-name-attr" class="control-label col-sm-4">{{'authentication.attributes.groupNameAttr' | translate}}</label>
+            <div class="col-sm-8">
+              <input type="text" class="form-control" id="group-name-attr" ng-model="attributes.groupNameAttr">
+            </div>
+          </div>
+          <div class="form-group">
+            <label for="group-member-attr" class="control-label col-sm-4">{{'authentication.attributes.groupMemberAttr' | translate}}</label>
+            <div class="col-sm-8">
+              <input type="text" class="form-control" id="group-member-attr" ng-model="attributes.groupMemberAttr">
+            </div>
+          </div>
+          <div class="form-group">
+            <label for="distinguished-name-attr" class="control-label col-sm-4">{{'authentication.attributes.distinguishedNameAttr' | translate}}</label>
+            <div class="col-sm-8">
+              <input type="text" class="form-control" id="distinguished-name-attr" ng-model="attributes.distinguishedNameAttr">
+            </div>
+          </div>
+          <div ng-show="attributes.detection === 'manual'">
+            <div class="form-group">
+              <label for="user-search-manual" class="control-label col-sm-4">{{'authentication.attributes.userSearch' | translate}}</label>
+              <div class="col-sm-8">
+                <input type="text" class="form-control" id="user-search-manual" ng-model="attributes.userSearchManual">
+              </div>
+            </div>
+            <div class="form-group">
+              <label for="group-search-manual" class="control-label col-sm-4">{{'authentication.attributes.groupSearch' | translate}}</label>
+              <div class="col-sm-8">
+                <input type="text" class="form-control" id="group-search-manual" ng-model="attributes.groupSearchManual">
+              </div>
+            </div>
+          </div>
+          <div class="form-group col-sm-12">{{'authentication.attributes.test.description' | translate}}</div>
+          <div class="form-group">
+            <div class="col-sm-offset-4 col-sm-8">
+              <button type="submit" class="btn btn-primary" ng-click="showTestAttributesForm()" ng-disabled="isAttributesFormInvalid || isTestAttributesFormShown || isRequestRunning">{{'authentication.attributes.controls.testAttrs' | translate}}</button>
+            </div>
+          </div>
+        </form>
+
+        <form class="form-horizontal" ng-show="isTestAttributesFormShown" ng-submit="testAttributes()">
+          <div class="form-group">
+            <label for="username" class="control-label col-sm-4">{{'authentication.attributes.test.username' | translate}}</label>
+            <div class="col-sm-8">
+              <input type="text" class="form-control" id="username" ng-model="attributes.username">
+            </div>
+          </div>
+          <div class="form-group">
+            <label for="password" class="control-label col-sm-4">{{'authentication.attributes.test.password' | translate}}</label>
+            <div class="col-sm-8">
+              <input type="password" class="form-control" id="password" ng-model="attributes.password">
+            </div>
+          </div>
+          <div class="form-group">
+            <div class="col-sm-offset-4 col-sm-8">
+              <button type="submit" class="btn btn-primary" ng-disabled="isTestAttributesFormInvalid || isRequestRunning">{{'authentication.controls.test' | translate}}</button>
+              <i class="test-ldap-icon fa ng-class: {'fa-spin fa-spinner': isTestAttributesRunning, 'fa-times-circle': isTestAttributesComplete && !isTestAttributesSuccessful}" ng-show="isTestAttributesRunning || isTestAttributesComplete"></i>
+            </div>
+          </div>
+        </form>
+
+        <div class="form-horizontal" ng-show="isTestAttributesSuccessful">
+          <div class="form-group">
+            <span class="control-label col-sm-4">
+              {{'authentication.attributes.alerts.successfulAuth' | translate}}
+            </span>
+            <div class="col-sm-1">
+              <i class="control-label test-ldap-icon fa fa-check-circle"></i>
+            </div>
+          </div>
+          <div class="form-group">
+            <label for="groups" class="control-label col-sm-4">{{'authentication.attributes.groupsList' | translate}}</label>
+            <div class="col-sm-6">
+              <select multiple class="form-control" id="groups" form="attributes" ng-model="attributes.groups" ng-options="item for item in attributes.availableGroups"></select>
+            </div>
+          </div>
+        </div>
+
+        <div class="text-center form-group">
+          <button type="submit" form="attributes" class="btn btn-primary" ng-disabled="isAttributesFormInvalid || isRequestRunning">{{'common.controls.save' | translate}}</button>
+          <i class="test-ldap-icon fa fa-spin fa-spinner" ng-show="isSaving"></i>
+        </div>
+
+      </div>
+
+    </div>
+
+  </div>
+
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/46f6030b/ambari-admin/src/main/resources/ui/admin-web/app/views/users/create.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/users/create.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/users/create.html
index c0967fb..bc86819 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/users/create.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/users/create.html
@@ -25,7 +25,7 @@
     <label for="username" class="col-sm-2 control-label">{{'users.username' | translate}}</label>
     <div class="col-sm-10">
       <input type="text" id="username" class="form-control username-input" name="user_name" placeholder="{{'users.userName' | translate}}" ng-model="user.user_name" required autocomplete="off">
-      <div class="alert alert-danger top-margin" ng-show="form.user_name.$error.required && form.submitted">{{'common.alerts.fieldIsRequired' | translate}</div>
+      <div class="alert alert-danger top-margin" ng-show="form.user_name.$error.required && form.submitted">{{'common.alerts.fieldIsRequired' | translate}}</div>
     </div>
   </div>
   <div class="form-group">


[05/16] ambari git commit: AMBARI-14816: Run a service check on PXF service for Hive (lavjain via jaoki)

Posted by nc...@apache.org.
AMBARI-14816: Run a service check on PXF service for Hive (lavjain via jaoki)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7632b7bb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7632b7bb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7632b7bb

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 7632b7bb2454a67015308da25c588dd0bf4aafe2
Parents: 6fb0058
Author: Jun Aoki <ja...@apache.org>
Authored: Tue Feb 2 16:19:24 2016 -0800
Committer: Jun Aoki <ja...@apache.org>
Committed: Tue Feb 2 16:19:24 2016 -0800

----------------------------------------------------------------------
 .../PXF/3.0.0/package/scripts/params.py         |   6 +-
 .../PXF/3.0.0/package/scripts/pxf_constants.py  |   1 +
 .../PXF/3.0.0/package/scripts/service_check.py  | 101 +++++++++++++------
 3 files changed, 76 insertions(+), 32 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7632b7bb/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
index 36dc7c1..7749de7 100644
--- a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
@@ -16,7 +16,6 @@ See the License for the specific language governing permissions and
 limitations under the License.
 """
 
-import os
 import functools
 
 from resource_management import Script
@@ -35,6 +34,7 @@ pxf_group = pxf_user
 hdfs_superuser = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_superuser_group = config["configurations"]["hdfs-site"]["dfs.permissions.superusergroup"]
 user_group = config["configurations"]["cluster-env"]["user_group"]
+hive_user = default('configurations/hive-env/hive_user', None)
 tomcat_group = "tomcat"
 
 # Directories
@@ -54,11 +54,13 @@ realm_name = config['configurations']['kerberos-env']['realm']
 #HBase
 is_hbase_installed = default("/clusterHostInfo/hbase_master_hosts", None) is not None
 
+#Hive
+is_hive_installed = default("/clusterHostInfo/hive_server_host", None) is not None
+
 # HDFS
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 
-security_enabled = config['configurations']['cluster-env']['security_enabled']
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']

http://git-wip-us.apache.org/repos/asf/ambari/blob/7632b7bb/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf_constants.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf_constants.py b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf_constants.py
index 3138379..9d93a38 100644
--- a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf_constants.py
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf_constants.py
@@ -24,3 +24,4 @@ pxf_hdfs_test_dir = "/pxf_hdfs_smoke_test"
 pxf_hdfs_read_test_file = pxf_hdfs_test_dir + "/pxf_smoke_test_read_data"
 pxf_hdfs_write_test_file = pxf_hdfs_test_dir + "/pxf_smoke_test_write_data"
 pxf_hbase_test_table = "pxf_hbase_smoke_test_table"
+pxf_hive_test_table = "pxf_hive_smoke_test_table"

http://git-wip-us.apache.org/repos/asf/ambari/blob/7632b7bb/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/service_check.py
index 72b59e4..064be04 100644
--- a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/service_check.py
@@ -24,8 +24,6 @@ from resource_management.core.resources.system import Execute
 from pxf_utils import makeHTTPCall, runLocalCmd
 import pxf_constants
 
-import sys
-
 class PXFServiceCheck(Script):
   """
   Runs a set of simple PXF tests to verify if the service has been setup correctly
@@ -51,10 +49,11 @@ class PXFServiceCheck(Script):
     import params
     self.pxf_version = self.__get_pxf_protocol_version()
     try:
-      self.cleanup_test_data()
       self.run_hdfs_tests()
       if params.is_hbase_installed:
         self.run_hbase_tests()
+      if params.is_hive_installed:
+        self.run_hive_tests()
     except:
       msg = "PXF service check failed"
       Logger.error(msg)
@@ -75,6 +74,8 @@ class PXFServiceCheck(Script):
     self.__cleanup_hdfs_data()
     if params.is_hbase_installed:
       self.__cleanup_hbase_data()
+    if params.is_hive_installed:
+      self.__cleanup_hive_data()
 
 
   def __get_pxf_protocol_version(self):
@@ -98,17 +99,33 @@ class PXFServiceCheck(Script):
     raise Fail(msg)
 
 
+  def __check_pxf_read(self, headers):
+    """
+    Performs a generic PXF read
+    """
+    url = self.base_url + self.pxf_version + "/Fragmenter/getFragments?path="
+    try:
+      response = makeHTTPCall(url, headers)
+      if not "PXFFragments" in response:
+        Logger.error("Unable to find PXFFragments in the response")
+        raise
+    except:
+      msg = "PXF data read failed"
+      raise Fail(msg)
+
+
+  # HDFS Routines
   def run_hdfs_tests(self):
     """
     Runs a set of PXF HDFS checks
     """
     Logger.info("Running PXF HDFS checks")
-    self.__check_if_client_exists("HDFS")
+    self.__check_if_client_exists("Hadoop-HDFS")
+    self.__cleanup_hdfs_data()
     self.__write_hdfs_data()
     self.__check_pxf_hdfs_read()
     self.__check_pxf_hdfs_write()
 
-
   def __write_hdfs_data(self):
     """
     Writes some test HDFS data for the tests
@@ -127,22 +144,6 @@ class PXFServiceCheck(Script):
         action="create_on_execute"
         )
 
- 
-  def __check_pxf_read(self, headers):
-    """
-    Performs a generic PXF read 
-    """
-    url = self.base_url + self.pxf_version + "/Fragmenter/getFragments?path="
-    try:
-      response = makeHTTPCall(url, headers)
-      if not "PXFFragments" in response:
-        Logger.error("Unable to find PXFFragments in the response")
-        raise 
-    except:
-      msg = "PXF data read failed"
-      raise Fail(msg)
-
-
   def __check_pxf_hdfs_read(self):
     """
     Reads the test HDFS data through PXF
@@ -155,7 +156,6 @@ class PXFServiceCheck(Script):
     headers.update(self.commonPXFHeaders)
     self.__check_pxf_read(headers)
 
-
   def __check_pxf_hdfs_write(self):
     """
     Writes some test HDFS data through PXF
@@ -184,7 +184,6 @@ class PXFServiceCheck(Script):
       msg = "PXF HDFS data write test failed"
       raise Fail(msg)
 
-
   def __cleanup_hdfs_data(self):
     """
     Cleans up the test HDFS data
@@ -201,16 +200,17 @@ class PXFServiceCheck(Script):
         )
 
 
+  # HBase Routines
   def run_hbase_tests(self):
     """
     Runs a set of PXF HBase checks
     """
     Logger.info("Running PXF HBase checks")
+    self.__cleanup_hbase_data()
     self.__check_if_client_exists("HBase")
     self.__write_hbase_data()
     self.__check_pxf_hbase_read()
 
-
   def __write_hbase_data(self):
     """
     Creates a temporary HBase table for the service checks
@@ -219,7 +219,6 @@ class PXFServiceCheck(Script):
     Execute("echo \"create '" + pxf_constants.pxf_hbase_test_table + "', 'cf'\"|hbase shell", logoutput = True)
     Execute("echo \"put '" + pxf_constants.pxf_hbase_test_table + "', 'row1', 'cf:a', 'value1'; put '" + pxf_constants.pxf_hbase_test_table + "', 'row1', 'cf:b', 'value2'\" | hbase shell", logoutput = True)
 
-
   def __check_pxf_hbase_read(self):
     """
     Checks reading HBase data through PXF
@@ -233,7 +232,6 @@ class PXFServiceCheck(Script):
 
     self.__check_pxf_read(headers)
 
-
   def __cleanup_hbase_data(self):
     """
     Cleans up the test HBase data
@@ -243,6 +241,49 @@ class PXFServiceCheck(Script):
     Execute("echo \"drop '" + pxf_constants.pxf_hbase_test_table + "'\"|hbase shell > /dev/null 2>&1", logoutput = True)
 
 
+  # Hive Routines
+  def run_hive_tests(self):
+    """
+    Runs a set of PXF Hive checks
+    """
+    Logger.info("Running PXF Hive checks")
+    self.__check_if_client_exists("Hive")
+    self.__cleanup_hive_data()
+    self.__write_hive_data()
+    self.__check_pxf_hive_read()
+
+  def __write_hive_data(self):
+    """
+    Creates a temporary Hive table for the service checks
+    """
+    import params
+    Logger.info("Creating temporary Hive test data")
+    cmd = "hive -e 'CREATE TABLE IF NOT EXISTS {0} (id INT); INSERT INTO {0} VALUES (1);'".format(pxf_constants.pxf_hive_test_table)
+    Execute(cmd, logoutput = True, user = params.hive_user)
+
+  def __check_pxf_hive_read(self):
+    """
+    Checks reading Hive data through PXF
+    """
+    Logger.info("Testing PXF Hive data read")
+    headers = {
+        "X-GP-DATA-DIR": pxf_constants.pxf_hive_test_table,
+        "X-GP-profile": "Hive",
+        }
+    headers.update(self.commonPXFHeaders)
+    self.__check_pxf_read(headers)
+
+  def __cleanup_hive_data(self):
+    """
+    Cleans up the test Hive data
+    """
+    import params
+    Logger.info("Cleaning up Hive test data")
+    cmd = "hive -e 'DROP TABLE IF EXISTS {0};'".format(pxf_constants.pxf_hive_test_table)
+    Execute(cmd, logoutput = True, user = params.hive_user)
+
+
+  # Package Routines
   def __package_exists(self, pkg):
     """
     Low level function to check if a rpm is installed
@@ -250,15 +291,15 @@ class PXFServiceCheck(Script):
     if System.get_instance().os_family == "suse":
       return not runLocalCmd("zypper search " + pkg)
     else:
-      return not runLocalCmd("yum list installed | grep -i " + pkg)
+      return not runLocalCmd("yum list installed | egrep -i ^" + pkg)
 
 
   def __check_if_client_exists(self, serviceName):
     Logger.info("Checking if " + serviceName + " client libraries exist")
     if not self.__package_exists(serviceName):
-      error_msg = serviceName + "client libraries do not exist on the PXF node"
-      Logger.error(msg)
-      raise Fail(msg)
+      error_msg = serviceName + " client libraries do not exist on the PXF node"
+      Logger.error(error_msg)
+      raise Fail(error_msg)
 
 
 if __name__ == "__main__":


[14/16] ambari git commit: AMBARI-14891. RU/EU should only delete configs on downgrade if source stack matches stack whose status is CURRENT (alejandro)

Posted by nc...@apache.org.
AMBARI-14891. RU/EU should only delete configs on downgrade if source stack matches stack whose status is CURRENT (alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7259d972
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7259d972
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7259d972

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 7259d972e98b69415bf786f430a6a1276f9e78b7
Parents: 6d9e059
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Wed Feb 3 09:43:50 2016 -0800
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Wed Feb 3 09:43:53 2016 -0800

----------------------------------------------------------------------
 .../upgrades/FinalizeUpgradeAction.java         |  23 ++-
 .../upgrades/UpgradeActionTest.java             | 160 ++++++++++++++++---
 2 files changed, 156 insertions(+), 27 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7259d972/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
index 9331ef0..03d407a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
@@ -39,6 +39,7 @@ import org.apache.ambari.server.orm.entities.ClusterVersionEntity;
 import org.apache.ambari.server.orm.entities.HostComponentStateEntity;
 import org.apache.ambari.server.orm.entities.HostEntity;
 import org.apache.ambari.server.orm.entities.HostVersionEntity;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
 import org.apache.ambari.server.serveraction.AbstractServerAction;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -64,6 +65,8 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
   public static final String CLUSTER_NAME_KEY = "cluster_name";
   public static final String UPGRADE_DIRECTION_KEY = "upgrade_direction";
   public static final String VERSION_KEY = "version";
+  public static final String PREVIOUS_UPGRADE_NOT_COMPLETED_MSG = "It is possible that a previous upgrade was not finalized. " +
+      "For this reason, Ambari will not remove any configs. Please ensure that all database records are correct.";
 
   /**
    * The original "current" stack of the cluster before the upgrade started.
@@ -308,9 +311,25 @@ public class FinalizeUpgradeAction extends AbstractServerAction {
       Cluster cluster = clusters.getCluster(clusterName);
       StackId currentClusterStackId = cluster.getCurrentStackVersion();
 
-      // this was a cross-stack upgrade, meaning that configurations were
-      // created that now need to be removed
+      // Safety check that the cluster's stack (from clusterstate's current_stack_id) is equivalent to the
+      // cluster's CURRENT repo version's stack. This is to avoid deleting configs from the target stack if the customer
+      // ended up modifying their database manually after a stack upgrade and forgot to call "Save DB State".
+      ClusterVersionEntity currentClusterVersion = cluster.getCurrentClusterVersion();
+      RepositoryVersionEntity currentRepoVersion = currentClusterVersion.getRepositoryVersion();
+      StackId currentRepoStackId = currentRepoVersion.getStackId();
+      if (!currentRepoStackId.equals(originalStackId)) {
+        String msg = String.format("The stack of Cluster %s's CURRENT repo version is %s, yet the original stack id from " +
+            "the Stack Upgrade has a different value of %s. %s",
+            clusterName, currentRepoStackId.getStackId(), originalStackId.getStackId(), PREVIOUS_UPGRADE_NOT_COMPLETED_MSG);
+        out.append(msg);
+        err.append(msg);
+        throw new AmbariException("The source target stack doesn't match the cluster's CURRENT repo version's stack.");
+      }
+
+      // This was a cross-stack upgrade, meaning that configurations were created that now need to be removed.
       if (!originalStackId.equals(targetStackId)) {
+        out.append(String.format("Will remove configs since the original stack %s differs from the target stack %s " +
+            "that Ambari just downgraded from.", originalStackId.getStackId(), targetStackId.getStackId()));
         cluster.removeConfigurations(targetStackId);
       }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/7259d972/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
index 7a1958f..989eba2 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
@@ -92,10 +92,13 @@ import com.google.inject.persist.UnitOfWork;
  * Tests upgrade-related server side actions
  */
 public class UpgradeActionTest {
+  private static final String clusterName = "c1";
+
   private static final String HDP_2_1_1_0 = "2.1.1.0-1";
   private static final String HDP_2_1_1_1 = "2.1.1.1-2";
 
   private static final String HDP_2_2_0_1 = "2.2.0.1-3";
+  private static final String HDP_2_2_0_2 = "2.2.0.2-4";
 
   private static final StackId HDP_21_STACK = new StackId("HDP-2.1.1");
   private static final StackId HDP_22_STACK = new StackId("HDP-2.2.0");
@@ -161,7 +164,6 @@ public class UpgradeActionTest {
   }
 
   private void makeDowngradeCluster(StackId sourceStack, String sourceRepo, StackId targetStack, String targetRepo) throws Exception {
-    String clusterName = "c1";
     String hostName = "h1";
 
     clusters.addCluster(clusterName, sourceStack);
@@ -200,8 +202,71 @@ public class UpgradeActionTest {
     hostVersionDAO.create(entity);
   }
 
+  private void makeTwoUpgradesWhereLastDidNotComplete(StackId sourceStack, String sourceRepo, StackId midStack, String midRepo, StackId targetStack, String targetRepo) throws Exception {
+    String hostName = "h1";
+
+    clusters.addCluster(clusterName, sourceStack);
+
+    Cluster c = clusters.getCluster(clusterName);
+
+    // add a host component
+    clusters.addHost(hostName);
+
+    Host host = clusters.getHost(hostName);
+
+    Map<String, String> hostAttributes = new HashMap<String, String>();
+    hostAttributes.put("os_family", "redhat");
+    hostAttributes.put("os_release_version", "6");
+    host.setHostAttributes(hostAttributes);
+    host.persist();
+
+    // Create the starting repo version
+    m_helper.getOrCreateRepositoryVersion(sourceStack, sourceRepo);
+    c.createClusterVersion(sourceStack, sourceRepo, "admin", RepositoryVersionState.UPGRADING);
+    c.transitionClusterVersion(sourceStack, sourceRepo, RepositoryVersionState.CURRENT);
+
+    // Start upgrading the mid repo
+    m_helper.getOrCreateRepositoryVersion(midStack, midRepo);
+    c.setDesiredStackVersion(midStack);
+    c.createClusterVersion(midStack, midRepo, "admin", RepositoryVersionState.INSTALLING);
+    c.transitionClusterVersion(midStack, midRepo, RepositoryVersionState.INSTALLED);
+    c.transitionClusterVersion(midStack, midRepo, RepositoryVersionState.UPGRADING);
+    c.transitionClusterVersion(midStack, midRepo, RepositoryVersionState.UPGRADED);
+    c.transitionClusterVersion(midStack, midRepo, RepositoryVersionState.CURRENT);
+
+    // Set original version as INSTALLED
+    c.transitionClusterVersion(sourceStack, sourceRepo, RepositoryVersionState.INSTALLED);
+
+    // Notice that we have not yet changed the cluster current stack to the mid stack to simulate
+    // the user skipping this step.
+
+    m_helper.getOrCreateRepositoryVersion(targetStack, targetRepo);
+    c.setDesiredStackVersion(targetStack);
+    c.createClusterVersion(targetStack, targetRepo, "admin", RepositoryVersionState.INSTALLING);
+    c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.INSTALLED);
+    c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.UPGRADING);
+    c.transitionClusterVersion(targetStack, targetRepo, RepositoryVersionState.UPGRADED);
+
+    // Create a host version for the starting repo in INSTALLED
+    HostVersionEntity entitySource = new HostVersionEntity();
+    entitySource.setHostEntity(hostDAO.findByName(hostName));
+    entitySource.setRepositoryVersion(repoVersionDAO.findByStackAndVersion(sourceStack, sourceRepo));
+    entitySource.setState(RepositoryVersionState.INSTALL_FAILED);
+    hostVersionDAO.create(entitySource);
+
+    // Create a host version for the mid repo in CURRENT
+    c.mapHostVersions(Collections.singleton(hostName), c.getCurrentClusterVersion(),
+        RepositoryVersionState.CURRENT);
+
+    // Create a host version for the target repo in UPGRADED
+    HostVersionEntity entityTarget = new HostVersionEntity();
+    entityTarget.setHostEntity(hostDAO.findByName(hostName));
+    entityTarget.setRepositoryVersion(repoVersionDAO.findByStackAndVersion(targetStack, targetRepo));
+    entityTarget.setState(RepositoryVersionState.UPGRADED);
+    hostVersionDAO.create(entityTarget);
+  }
+
   private void makeUpgradeCluster(StackId sourceStack, String sourceRepo, StackId targetStack, String targetRepo) throws Exception {
-    String clusterName = "c1";
     String hostName = "h1";
 
     Clusters clusters = m_injector.getInstance(Clusters.class);
@@ -272,7 +337,6 @@ public class UpgradeActionTest {
   }
 
   private void makeCrossStackUpgradeCluster(StackId sourceStack, String sourceRepo, StackId targetStack, String targetRepo) throws Exception {
-    String clusterName = "c1";
     String hostName = "h1";
 
     Clusters clusters = m_injector.getInstance(Clusters.class);
@@ -356,7 +420,7 @@ public class UpgradeActionTest {
     RepositoryVersionEntity targetRve = repoVersionDAO.findByStackNameAndVersion("HDP", targetRepo);
     Assert.assertNotNull(targetRve);
 
-    Cluster cluster = clusters.getCluster("c1");
+    Cluster cluster = clusters.getCluster(clusterName);
 
     // Install ZK and HDFS with some components
     Service zk = installService(cluster, "ZOOKEEPER");
@@ -391,7 +455,7 @@ public class UpgradeActionTest {
     String userName = "admin";
     roleParams.put(ServerAction.ACTION_USER_NAME, userName);
     executionCommand.setRoleParams(roleParams);
-    executionCommand.setClusterName("c1");
+    executionCommand.setClusterName(clusterName);
 
     HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
@@ -425,10 +489,12 @@ public class UpgradeActionTest {
     Map<String, String> commandParams = new HashMap<String, String>();
     commandParams.put(FinalizeUpgradeAction.UPGRADE_DIRECTION_KEY, "downgrade");
     commandParams.put(FinalizeUpgradeAction.VERSION_KEY, sourceRepo);
+    commandParams.put(FinalizeUpgradeAction.ORIGINAL_STACK_KEY, sourceStack.getStackId());
+    commandParams.put(FinalizeUpgradeAction.TARGET_STACK_KEY, targetStack.getStackId());
 
     ExecutionCommand executionCommand = new ExecutionCommand();
     executionCommand.setCommandParams(commandParams);
-    executionCommand.setClusterName("c1");
+    executionCommand.setClusterName(clusterName);
 
     HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
@@ -441,7 +507,7 @@ public class UpgradeActionTest {
     assertNotNull(report);
     assertEquals(HostRoleStatus.COMPLETED.name(), report.getStatus());
 
-    for (HostVersionEntity entity : hostVersionDAO.findByClusterAndHost("c1", "h1")) {
+    for (HostVersionEntity entity : hostVersionDAO.findByClusterAndHost(clusterName, "h1")) {
       if (entity.getRepositoryVersion().getVersion().equals(sourceRepo)) {
         assertEquals(RepositoryVersionState.CURRENT, entity.getState());
       } else if (entity.getRepositoryVersion().getVersion().equals(targetRepo)) {
@@ -449,7 +515,7 @@ public class UpgradeActionTest {
       }
     }
 
-    for (ClusterVersionEntity entity : clusterVersionDAO.findByCluster("c1")) {
+    for (ClusterVersionEntity entity : clusterVersionDAO.findByCluster(clusterName)) {
       if (entity.getRepositoryVersion().getVersion().equals(sourceRepo)) {
         assertEquals(RepositoryVersionState.CURRENT, entity.getState());
       } else if (entity.getRepositoryVersion().getVersion().equals(targetRepo)) {
@@ -458,6 +524,50 @@ public class UpgradeActionTest {
     }
   }
 
+  /**
+   * Test a case in which a customer performs an upgrade from HDP 2.1 to 2.2 (e.g., 2.2.0.0), but skips the step to
+   * finalize, which calls "Save DB State". Therefore, the cluster's current stack is still on HDP 2.1.
+   * They can still modify the database manually to mark HDP 2.2 as CURRENT in the cluster_version and then begin
+   * another upgrade to 2.2.0.2 and then downgrade.
+   * In the downgrade, the original stack is still 2.1 but the stack for the version marked as CURRENT is 2.2; this
+   * mismatch means that the downgrade should not delete configs and will report a warning.
+   * @throws Exception
+   */
+  @Test
+  public void testFinalizeDowngradeWhenDidNotFinalizePreviousUpgrade() throws Exception {
+    StackId sourceStack = HDP_21_STACK;
+    StackId midStack = HDP_22_STACK;
+    StackId targetStack = HDP_22_STACK;
+
+    String sourceRepo = HDP_2_1_1_0;
+    String midRepo = HDP_2_2_0_1;
+    String targetRepo = HDP_2_2_0_2;
+
+    makeTwoUpgradesWhereLastDidNotComplete(sourceStack, sourceRepo, midStack, midRepo, targetStack, targetRepo);
+
+    Map<String, String> commandParams = new HashMap<String, String>();
+    commandParams.put(FinalizeUpgradeAction.UPGRADE_DIRECTION_KEY, "downgrade");
+    commandParams.put(FinalizeUpgradeAction.VERSION_KEY, midRepo);
+    commandParams.put(FinalizeUpgradeAction.ORIGINAL_STACK_KEY, sourceStack.getStackId());
+    commandParams.put(FinalizeUpgradeAction.TARGET_STACK_KEY, targetStack.getStackId());
+
+    ExecutionCommand executionCommand = new ExecutionCommand();
+    executionCommand.setCommandParams(commandParams);
+    executionCommand.setClusterName(clusterName);
+
+    HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
+    hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
+
+    FinalizeUpgradeAction action = m_injector.getInstance(FinalizeUpgradeAction.class);
+    action.setExecutionCommand(executionCommand);
+    action.setHostRoleCommand(hostRoleCommand);
+
+    CommandReport report = action.execute(null);
+    assertNotNull(report);
+    assertEquals(HostRoleStatus.FAILED.name(), report.getStatus());
+    assertTrue(report.getStdErr().contains(FinalizeUpgradeAction.PREVIOUS_UPGRADE_NOT_COMPLETED_MSG));
+  }
+
   @Test
   public void testFinalizeUpgrade() throws Exception {
     StackId sourceStack = HDP_21_STACK;
@@ -471,7 +581,7 @@ public class UpgradeActionTest {
     AmbariMetaInfo metaInfo = m_injector.getInstance(AmbariMetaInfo.class);
     AmbariCustomCommandExecutionHelper helper = m_injector.getInstance(AmbariCustomCommandExecutionHelper.class);
     Host host = clusters.getHost("h1");
-    Cluster cluster = clusters.getCluster("c1");
+    Cluster cluster = clusters.getCluster(clusterName);
 
     RepositoryInfo repo = metaInfo.getRepository(sourceStack.getStackName(), sourceStack.getStackVersion(), "redhat6", sourceStack.getStackId());
     assertEquals(HDP_211_CENTOS6_REPO_URL, repo.getBaseUrl());
@@ -484,7 +594,7 @@ public class UpgradeActionTest {
 
     ExecutionCommand executionCommand = new ExecutionCommand();
     executionCommand.setCommandParams(commandParams);
-    executionCommand.setClusterName("c1");
+    executionCommand.setClusterName(clusterName);
 
     HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
@@ -534,7 +644,7 @@ public class UpgradeActionTest {
     AmbariMetaInfo metaInfo = m_injector.getInstance(AmbariMetaInfo.class);
     AmbariCustomCommandExecutionHelper helper = m_injector.getInstance(AmbariCustomCommandExecutionHelper.class);
     Host host = clusters.getHost("h1");
-    Cluster cluster = clusters.getCluster("c1");
+    Cluster cluster = clusters.getCluster(clusterName);
 
     RepositoryInfo repo = metaInfo.getRepository(sourceStack.getStackName(),
         sourceStack.getStackVersion(), "redhat6", sourceStack.getStackId());
@@ -548,7 +658,7 @@ public class UpgradeActionTest {
 
     ExecutionCommand executionCommand = new ExecutionCommand();
     executionCommand.setCommandParams(commandParams);
-    executionCommand.setClusterName("c1");
+    executionCommand.setClusterName(clusterName);
 
     HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
     hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
@@ -584,7 +694,7 @@ public class UpgradeActionTest {
 
     makeCrossStackUpgradeCluster(sourceStack, sourceRepo, targetStack, targetRepo);
 
-    Cluster cluster = clusters.getCluster("c1");
+    Cluster cluster = clusters.getCluster(clusterName);
 
     // setup the cluster for the upgrade across stacks
     cluster.setCurrentStackVersion(sourceStack);
@@ -598,7 +708,7 @@ public class UpgradeActionTest {
 
     ExecutionCommand executionCommand = new ExecutionCommand();
     executionCommand.setCommandParams(commandParams);
-    executionCommand.setClusterName("c1");
+    executionCommand.setClusterName(clusterName);
 
     HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
 
@@ -635,7 +745,7 @@ public class UpgradeActionTest {
     String targetRepo = HDP_2_2_0_1;
 
     makeCrossStackUpgradeCluster(sourceStack, sourceRepo, targetStack, targetRepo);
-    Cluster cluster = clusters.getCluster("c1");
+    Cluster cluster = clusters.getCluster(clusterName);
 
     // install HDFS with some components
     Service service = installService(cluster, "HDFS");
@@ -656,7 +766,7 @@ public class UpgradeActionTest {
     createConfigs(cluster);
 
     // verify we have configs in both HDP stacks
-    cluster = clusters.getCluster("c1");
+    cluster = clusters.getCluster(clusterName);
     Collection<Config> configs = cluster.getAllConfigs();
     assertEquals(8, configs.size());
 
@@ -668,7 +778,7 @@ public class UpgradeActionTest {
 
     ExecutionCommand executionCommand = new ExecutionCommand();
     executionCommand.setCommandParams(commandParams);
-    executionCommand.setClusterName("c1");
+    executionCommand.setClusterName(clusterName);
 
     HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
 
@@ -676,7 +786,7 @@ public class UpgradeActionTest {
 
     HostVersionDAO dao = m_injector.getInstance(HostVersionDAO.class);
 
-    List<HostVersionEntity> hosts = dao.findByClusterStackAndVersion("c1", targetStack, targetRepo);
+    List<HostVersionEntity> hosts = dao.findByClusterStackAndVersion(clusterName, targetStack, targetRepo);
     assertFalse(hosts.isEmpty());
     for (HostVersionEntity hve : hosts) {
       assertFalse(hve.getState() == RepositoryVersionState.INSTALLED);
@@ -699,11 +809,11 @@ public class UpgradeActionTest {
     assertEquals(sourceStack, desiredStackId);
 
     // verify we have configs in only 1 stack
-    cluster = clusters.getCluster("c1");
+    cluster = clusters.getCluster(clusterName);
     configs = cluster.getAllConfigs();
     assertEquals(4, configs.size());
 
-    hosts = dao.findByClusterStackAndVersion("c1", targetStack, targetRepo);
+    hosts = dao.findByClusterStackAndVersion(clusterName, targetStack, targetRepo);
     assertFalse(hosts.isEmpty());
     for (HostVersionEntity hve : hosts) {
       assertTrue(hve.getState() == RepositoryVersionState.INSTALLED);
@@ -726,7 +836,7 @@ public class UpgradeActionTest {
 
     makeCrossStackUpgradeCluster(sourceStack, sourceRepo, targetStack, targetRepo);
 
-    Cluster cluster = clusters.getCluster("c1");
+    Cluster cluster = clusters.getCluster(clusterName);
 
     Service service = installService(cluster, "HDFS");
     addServiceComponent(cluster, service, "NAMENODE");
@@ -749,14 +859,14 @@ public class UpgradeActionTest {
     // inject an unhappy path where the cluster repo version is still UPGRADING
     // even though all of the hosts are UPGRADED
     ClusterVersionEntity upgradingClusterVersion = clusterVersionDAO.findByClusterAndStackAndVersion(
-        "c1", HDP_22_STACK, targetRepo);
+        clusterName, HDP_22_STACK, targetRepo);
 
     upgradingClusterVersion.setState(RepositoryVersionState.UPGRADING);
     upgradingClusterVersion = clusterVersionDAO.merge(upgradingClusterVersion);
 
     // verify the conditions for the test are met properly
-    upgradingClusterVersion = clusterVersionDAO.findByClusterAndStackAndVersion("c1", HDP_22_STACK, targetRepo);
-    List<HostVersionEntity> hostVersions = hostVersionDAO.findByClusterStackAndVersion("c1", HDP_22_STACK, targetRepo);
+    upgradingClusterVersion = clusterVersionDAO.findByClusterAndStackAndVersion(clusterName, HDP_22_STACK, targetRepo);
+    List<HostVersionEntity> hostVersions = hostVersionDAO.findByClusterStackAndVersion(clusterName, HDP_22_STACK, targetRepo);
 
     assertEquals(RepositoryVersionState.UPGRADING, upgradingClusterVersion.getState());
     assertTrue(hostVersions.size() > 0);
@@ -774,7 +884,7 @@ public class UpgradeActionTest {
 
     ExecutionCommand executionCommand = new ExecutionCommand();
     executionCommand.setCommandParams(commandParams);
-    executionCommand.setClusterName("c1");
+    executionCommand.setClusterName(clusterName);
 
     HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
 


[04/16] ambari git commit: AMBARI-14872: HAWQ service check fails if master port is not 5432 (adenissov via jaoki)

Posted by nc...@apache.org.
AMBARI-14872: HAWQ service check fails if master port is not 5432 (adenissov via jaoki)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6fb00585
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6fb00585
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6fb00585

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 6fb00585cf9b7e55ce615bbfc4d7ee11f4ed319e
Parents: 94c13ee
Author: Jun Aoki <ja...@apache.org>
Authored: Tue Feb 2 15:51:24 2016 -0800
Committer: Jun Aoki <ja...@apache.org>
Committed: Tue Feb 2 15:51:24 2016 -0800

----------------------------------------------------------------------
 .../HAWQ/2.0.0/package/scripts/service_check.py | 22 ++++++++++----------
 .../HAWQ/2.0.0/package/scripts/utils.py         |  4 ++--
 2 files changed, 13 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6fb00585/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/service_check.py
index 4e5dc32..6e7ea0f 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/service_check.py
@@ -30,18 +30,17 @@ class HAWQServiceCheck(Script):
   Runs a set of HAWQ tests to verify if the service has been setup correctly
   """
 
-  def __init__(self):
-    self.active_master_host = common.get_local_hawq_site_property("hawq_master_address_host")
-    self.checks_failed = 0
-    self.total_checks = 3
-
-
   def service_check(self, env):
     """
     Runs service check for HAWQ.
     """
     import params
 
+    self.active_master_host = params.hawqmaster_host
+    self.active_master_port = params.hawq_master_address_port
+    self.checks_failed = 0
+    self.total_checks = 2
+
     # Checks HAWQ cluster state
     self.check_state()
 
@@ -50,6 +49,7 @@ class HAWQServiceCheck(Script):
 
     # Runs check for writing and reading external tables on HDFS using PXF, if PXF is installed
     if params.is_pxf_installed:
+      self.total_checks += 1
       self.check_hawq_pxf_hdfs()
     else:
       Logger.info("PXF not installed. Skipping HAWQ-PXF checks...")
@@ -128,32 +128,32 @@ class HAWQServiceCheck(Script):
   def drop_table(self, table):
     Logger.info("Dropping {0} table if exists".format(table['name']))
     sql_cmd = "DROP {0} TABLE IF EXISTS {1}".format(table['drop_type'], table['name'])
-    exec_psql_cmd(sql_cmd, self.active_master_host)
+    exec_psql_cmd(sql_cmd, self.active_master_host, self.active_master_port)
 
 
   def create_table(self, table):
     Logger.info("Creating table {0}".format(table['name']))
     sql_cmd = "CREATE {0} TABLE {1} {2}".format(table['create_type'], table['name'], table['description'])
-    exec_psql_cmd(sql_cmd, self.active_master_host)
+    exec_psql_cmd(sql_cmd, self.active_master_host, self.active_master_port)
 
 
   def insert_data(self, table):
     Logger.info("Inserting data to table {0}".format(table['name']))
     sql_cmd = "INSERT INTO  {0} SELECT * FROM generate_series(1,10)".format(table['name'])
-    exec_psql_cmd(sql_cmd, self.active_master_host)
+    exec_psql_cmd(sql_cmd, self.active_master_host, self.active_master_port)
 
 
   def query_data(self, table):
     Logger.info("Querying data from table {0}".format(table['name']))
     sql_cmd = "SELECT * FROM {0}".format(table['name'])
-    exec_psql_cmd(sql_cmd, self.active_master_host)
+    exec_psql_cmd(sql_cmd, self.active_master_host, self.active_master_port)
 
 
   def validate_data(self, table):
     expected_data = "55"
     Logger.info("Validating data inserted, finding sum of all the inserted entries. Expected output: {0}".format(expected_data))
     sql_cmd = "SELECT sum(col1) FROM {0}".format(table['name'])
-    _, stdout, _ = exec_psql_cmd(sql_cmd, self.active_master_host, tuples_only=False)
+    _, stdout, _ = exec_psql_cmd(sql_cmd, self.active_master_host, self.active_master_port, tuples_only=False)
     if expected_data != stdout.strip():
       Logger.error("Incorrect data returned. Expected Data: {0} Actual Data: {1}".format(expected_data, stdout))
       raise Fail("Incorrect data returned.")

http://git-wip-us.apache.org/repos/asf/ambari/blob/6fb00585/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/utils.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/utils.py
index ade4b90..e607a28 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/utils.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/utils.py
@@ -91,11 +91,11 @@ def exec_ssh_cmd(hostname, cmd):
   return process.returncode, stdout, stderr
 
 
-def exec_psql_cmd(command, host, db="template1", tuples_only=True):
+def exec_psql_cmd(command, host, port, db="template1", tuples_only=True):
   """
   Sets up execution environment and runs the HAWQ queries
   """
-  src_cmd = "source {0}".format(hawq_constants.hawq_greenplum_path_file)
+  src_cmd = "export PGPORT={0} && source {1}".format(port, hawq_constants.hawq_greenplum_path_file)
   if tuples_only:
     cmd = src_cmd + " && psql -d {0} -c \\\\\\\"{1};\\\\\\\"".format(db, command)
   else:


[16/16] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c72dc41d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c72dc41d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c72dc41d

Branch: refs/heads/branch-dev-patch-upgrade
Commit: c72dc41db25fe5898fe7dc36d1aa3bdaf4d0fc5d
Parents: 4c5d2bd 424cca6
Author: Nate Cole <nc...@hortonworks.com>
Authored: Wed Feb 3 14:14:07 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Wed Feb 3 14:14:07 2016 -0500

----------------------------------------------------------------------
 .../app/scripts/controllers/NavbarCtrl.js       |   9 +
 .../authentication/AuthenticationMainCtrl.js    | 147 ++++++++++-
 .../loginActivities/LoginMessageMainCtrl.js     |  59 ++++-
 .../ui/admin-web/app/scripts/i18n.config.js     |  82 +++++-
 .../resources/ui/admin-web/app/styles/main.css  |  42 +++-
 .../app/views/authentication/main.html          | 250 ++++++++++++++++++-
 .../ui/admin-web/app/views/leftNavbar.html      |   4 +-
 .../app/views/loginActivities/loginMessage.html |  74 ++++--
 .../ui/admin-web/app/views/users/create.html    |   2 +-
 ambari-agent/conf/unix/install-helper.sh        |  40 ++-
 .../src/main/package/deb/control/postinst       |  12 +-
 .../src/main/package/deb/control/posttrm        |  15 --
 ambari-agent/src/main/package/deb/control/prerm |   8 -
 .../src/main/package/rpm/postinstall.sh         |  19 +-
 ambari-agent/src/main/package/rpm/preremove.sh  |   8 -
 .../ambari_agent/CustomServiceOrchestrator.py   |  11 +-
 .../main/python/ambari_agent/PythonExecutor.py  |  14 +-
 .../ambari_agent/PythonReflectiveExecutor.py    |   5 +-
 ambari-agent/src/packages/tarball/all.xml       |   4 +-
 .../timeline/AbstractTimelineMetricsSink.java   |   4 +-
 .../metrics2/sink/timeline/TimelineMetric.java  |  13 +-
 .../sink/timeline/TimelineMetricMetadata.java   |  15 +-
 .../timeline/cache/TimelineMetricsCache.java    |   9 +-
 .../sink/flume/FlumeTimelineMetricsSink.java    |   7 +-
 .../timeline/HadoopTimelineMetricsSink.java     |   6 +-
 .../timeline/HadoopTimelineMetricsSinkTest.java |   5 +-
 .../kafka/KafkaTimelineMetricsReporter.java     |  16 +-
 .../storm/StormTimelineMetricsReporter.java     |   2 -
 .../timeline/HBaseTimelineMetricStore.java      |  19 +-
 .../metrics/timeline/PhoenixHBaseAccessor.java  | 111 +++-----
 .../metrics/timeline/aggregators/Function.java  |  75 ++++--
 .../aggregators/TimelineMetricReadHelper.java   |  38 +++
 .../TimelineMetricMetadataManager.java          |   5 +-
 .../metrics/timeline/FunctionTest.java          |  10 +-
 .../timeline/HBaseTimelineMetricStoreTest.java  |  31 ++-
 ambari-server/conf/unix/install-helper.sh       |  48 +++-
 ambari-server/src/main/assemblies/server.xml    |   2 +-
 .../upgrades/FinalizeUpgradeAction.java         |  23 +-
 .../src/main/package/deb/control/postinst       |  10 +-
 .../src/main/package/deb/control/posttrm        |  15 --
 .../src/main/package/deb/control/prerm          |  18 +-
 .../src/main/package/rpm/postinstall.sh         |   8 +-
 ambari-server/src/main/package/rpm/preremove.sh |  18 +-
 .../configuration/application-properties.xml    |  36 ++-
 .../ATLAS/0.1.0.2.3/metainfo.xml                |   4 +
 .../ATLAS/0.1.0.2.3/package/scripts/params.py   |  11 +
 .../common-services/HAWQ/2.0.0/alerts.json      |  19 ++
 .../2.0.0/package/alerts/alert_sync_status.py   |  91 +++++++
 .../HAWQ/2.0.0/package/scripts/service_check.py |  22 +-
 .../HAWQ/2.0.0/package/scripts/utils.py         |   4 +-
 .../HIVE/0.12.0.2.0/configuration/hive-env.xml  |   2 +
 .../0.12.0.2.0/package/scripts/params_linux.py  |   2 +
 .../PXF/3.0.0/package/scripts/params.py         |   6 +-
 .../PXF/3.0.0/package/scripts/pxf_constants.py  |   1 +
 .../PXF/3.0.0/package/scripts/service_check.py  | 101 +++++---
 .../services/HIVE/configuration/hive-env.xml    |   2 +
 .../upgrades/UpgradeActionTest.java             | 160 ++++++++++--
 .../stacks/2.3/HAWQ/test_alert_sync_status.py   | 194 ++++++++++++++
 .../test/python/stacks/2.3/configs/default.json |   9 +-
 ambari-web/app/assets/data/clusters/info.json   |  11 -
 ambari-web/app/assets/data/settings/motd.json   |  10 +
 .../main/admin/stack_and_upgrade_controller.js  |   2 +
 .../alerts/definition_configs_controller.js     |  29 +++
 .../app/mappers/alert_definitions_mapper.js     |  36 +--
 ambari-web/app/messages.js                      |   1 +
 ambari-web/app/models/alerts/alert_config.js    |  62 ++++-
 .../app/models/alerts/alert_definition.js       |   4 +-
 ambari-web/app/models/upgrade_entity.js         |   2 +-
 ambari-web/app/router.js                        |  41 +++
 ambari-web/app/styles/alerts.less               |   4 +
 .../alerts/configs/alert_config_parameter.hbs   |  33 +++
 ambari-web/app/utils/ajax/ajax.js               |   4 +
 ambari-web/app/views/common/table_view.js       |  16 +-
 .../main/alerts/definition_configs_view.js      |  10 +
 .../admin/stack_and_upgrade_controller_test.js  |  17 ++
 .../definitions_configs_controller_test.js      |  44 +++-
 .../mappers/alert_definitions_mapper_test.js    |  45 +++-
 .../test/models/alerts/alert_config_test.js     | 100 ++++++++
 78 files changed, 2001 insertions(+), 447 deletions(-)
----------------------------------------------------------------------



[10/16] ambari git commit: AMBARI-14853 - Atlas Integration: Support deploying latest Atlas(which depends on kafka) using Ambari (tbeerbower)

Posted by nc...@apache.org.
AMBARI-14853 - Atlas Integration: Support deploying latest Atlas(which depends on kafka) using Ambari (tbeerbower)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1d9f1bb0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1d9f1bb0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1d9f1bb0

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 1d9f1bb069895d6dbc344a5cf0d81e56ff0b7371
Parents: 18463c8
Author: tbeerbower <tb...@hortonworks.com>
Authored: Wed Feb 3 09:20:46 2016 -0500
Committer: tbeerbower <tb...@hortonworks.com>
Committed: Wed Feb 3 09:57:36 2016 -0500

----------------------------------------------------------------------
 .../configuration/application-properties.xml    | 36 +++++++++++++++++++-
 .../ATLAS/0.1.0.2.3/metainfo.xml                |  4 +++
 .../ATLAS/0.1.0.2.3/package/scripts/params.py   | 11 ++++++
 .../HIVE/0.12.0.2.0/configuration/hive-env.xml  |  2 ++
 .../0.12.0.2.0/package/scripts/params_linux.py  |  2 ++
 .../services/HIVE/configuration/hive-env.xml    |  2 ++
 .../test/python/stacks/2.3/configs/default.json |  9 ++++-
 7 files changed, 64 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1d9f1bb0/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/application-properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/application-properties.xml b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/application-properties.xml
index 82dacb6..74b1537 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/application-properties.xml
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/application-properties.xml
@@ -131,6 +131,41 @@
     <description></description>
   </property>
   <property>
+    <name>atlas.notification.embedded</name>
+    <value>false</value>
+    <description>Indicates whether or not the notification service should be embedded.</description>
+  </property>
+  <property>
+    <name>atlas.kafka.data</name>
+    <value>{{metadata_home}}/data/kafka</value>
+    <description>The Kafka data directory.</description>
+  </property>
+  <property>
+    <name>atlas.kafka.bootstrap.servers</name>
+    <value>{{kafka_bootstrap_servers}}</value>
+    <description>Comma separated list of Kafka broker endpoints in host:port form</description>
+  </property>
+  <property>
+    <name>atlas.kafka.zookeeper.connect</name>
+    <value>{{kafka_zookeeper_connect}}</value>
+    <description>Comma separated list of servers forming Zookeeper quorum used by Kafka.</description>
+  </property>
+  <property>
+    <name>atlas.kafka.hook.group.id</name>
+    <value>atlas</value>
+    <description>Kafka group id for the hook topic.</description>
+  </property>
+  <property>
+    <name>atlas.kafka.entities.group.id</name>
+    <value>entities</value>
+    <description>Kafka group id for the entity topic.</description>
+  </property>
+  <property>
+    <name>atlas.cluster.name</name>
+    <value>{{cluster_name}}</value>
+    <description>The cluster name.</description>
+  </property>
+  <property>
     <name>atlas.server.http.port</name>
     <value>21000</value>
     <description></description>
@@ -140,5 +175,4 @@
     <value>21443</value>
     <description></description>
   </property>
-
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1d9f1bb0/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/metainfo.xml b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/metainfo.xml
index 2600fc4..719f92c 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/metainfo.xml
@@ -92,6 +92,10 @@
         <timeout>300</timeout>
       </commandScript>
 
+      <requiredServices>
+        <service>KAFKA</service>
+      </requiredServices>
+
       <configuration-dependencies>
         <config-type>application-properties</config-type>
         <config-type>atlas-env</config-type>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1d9f1bb0/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
index 1a0c67b..eb2d816 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
@@ -28,6 +28,8 @@ import status_params
 # server configurations
 config = Script.get_config()
 
+cluster_name = config['clusterName']
+
 # security enabled
 security_enabled = status_params.security_enabled
 
@@ -112,3 +114,12 @@ if security_enabled:
     smoke_cmd = format('curl --negotiate -u : -b ~/cookiejar.txt -c ~/cookiejar.txt -s -o /dev/null -w "%{{http_code}}" http://{metadata_host}:{metadata_port}/')
 else:
     smoke_cmd = format('curl -s -o /dev/null -w "%{{http_code}}" http://{metadata_host}:{metadata_port}/')
+
+# kafka
+kafka_bootstrap_servers = ""
+kafka_broker_hosts = config['clusterHostInfo']['kafka_broker_hosts']
+if not len(kafka_broker_hosts) == 0:
+  kafka_broker_port = default("/configurations/kafka-broker/port", 6667)
+  kafka_bootstrap_servers = kafka_broker_hosts[0] + ":" + str(kafka_broker_port)
+
+kafka_zookeeper_connect = default("/configurations/kafka-broker/zookeeper.connect", None)

http://git-wip-us.apache.org/repos/asf/ambari/blob/1d9f1bb0/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-env.xml b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-env.xml
index 281bb41..7aa4a35 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-env.xml
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-env.xml
@@ -226,6 +226,8 @@ else
 fi
 export METASTORE_PORT={{hive_metastore_port}}
 
+export HADOOP_CLASSPATH={{atlas_conf_dir}}:{{atlas_home_dir}}/hook/hive:${HADOOP_CLASSPATH}
+
     </value>
     <value-attributes>
       <type>content</type>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1d9f1bb0/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index cd3741d..dc17dba 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -398,6 +398,8 @@ atlas_plugin_package = "atlas-metadata*-hive-plugin"
 atlas_ubuntu_plugin_package = "atlas-metadata.*-hive-plugin"
 
 if has_atlas:
+  atlas_home_dir = os.environ['METADATA_HOME_DIR'] if 'METADATA_HOME_DIR' in os.environ else '/usr/hdp/current/atlas-server'
+  atlas_conf_dir = os.environ['METADATA_CONF'] if 'METADATA_CONF' in os.environ else '/etc/atlas/conf'
   # client.properties
   atlas_client_props = {}
   auth_enabled = config['configurations']['application-properties'].get(

http://git-wip-us.apache.org/repos/asf/ambari/blob/1d9f1bb0/ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/configuration/hive-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/configuration/hive-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/configuration/hive-env.xml
index 92c0c03..1f2ca96 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/configuration/hive-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/configuration/hive-env.xml
@@ -69,6 +69,8 @@ fi
 
 export METASTORE_PORT={{hive_metastore_port}}
 
+export HADOOP_CLASSPATH={{atlas_conf_dir}}:{{atlas_home_dir}}/hook/hive:${HADOOP_CLASSPATH}
+
 {% if sqla_db_used or lib_dir_available %}
 export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:{{jdbc_libs_dir}}"
 export JAVA_LIBRARY_PATH="$JAVA_LIBRARY_PATH:{{jdbc_libs_dir}}"

http://git-wip-us.apache.org/repos/asf/ambari/blob/1d9f1bb0/ambari-server/src/test/python/stacks/2.3/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/configs/default.json b/ambari-server/src/test/python/stacks/2.3/configs/default.json
index 21bff13..a36455c 100644
--- a/ambari-server/src/test/python/stacks/2.3/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.3/configs/default.json
@@ -164,6 +164,7 @@
         "log.retention.hours": "168"
       },
       "application-properties": {
+        "atlas.cluster.name" : "c2",
         "atlas.graph.storage.backend": "berkeleyje",
         "atlas.graph.storage.directory": "data/berkley",
         "atlas.graph.index.search.backend": "elasticsearch",
@@ -185,7 +186,13 @@
         "atlas.http.authentication.kerberos.principal": "HTTP/_HOST@EXAMPLE.COM",
         "atlas.http.authentication.kerberos.keytab": "/etc/security/keytabs/spnego.service.keytab",
         "atlas.http.authentication.kerberos.name.rules": "DEFAULT",
-        "atlas.server.http.port" : "21000"
+        "atlas.server.http.port" : "21000",
+        "atlas.notification.embedded" : false,
+        "atlas.kafka.bootstrap.servers" : "c6401.ambari.apache.org:6667",
+        "atlas.kafka.data" : "/usr/hdp/current/atlas-server/data/kafka",
+        "atlas.kafka.entities.group.id" : "entities",
+        "atlas.kafka.hook.group.id" : "atlas",
+        "atlas.kafka.zookeeper.connect" : "c6401.ambari.apache.org:2181"
       },
       "atlas-env": {
         "content": "# The java implementation to use. If JAVA_HOME is not found we expect java and jar to be in path\nexport JAVA_HOME={{java64_home}}\n# any additional java opts you want to set. This will apply to both client and server operations\nexport METADATA_OPTS={{metadata_opts}}\n# metadata configuration directory\nexport METADATA_CONF={{conf_dir}}\n# Where log files are stored. Defatult is logs directory under the base install location\nexport METADATA_LOG_DIR={{log_dir}}\n# additional classpath entries\nexport METADATACPPATH={{metadata_classpath}}\n# data dir\nexport METADATA_DATA_DIR={{data_dir}}\n# Where do you want to expand the war file. By Default it is in /server/webapp dir under the base install dir.\nexport METADATA_EXPANDED_WEBAPP_DIR={{expanded_war_dir}}",