Posted to commits@ambari.apache.org by nc...@apache.org on 2016/02/01 16:41:38 UTC

[01/39] ambari git commit: AMBARI-14810 When there are multiple Job History Servers in the cluster, QuickLinks should show the URL for all instances. (atkach)

Repository: ambari
Updated Branches:
  refs/heads/branch-dev-patch-upgrade e7bdb7b05 -> 73aee31ef


AMBARI-14810 When there are multiple Job History Servers in the cluster, QuickLinks should show the URL for all instances. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/54311b78
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/54311b78
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/54311b78

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 54311b78863219e5702873e7d9ac94a942210ea9
Parents: f4edad8
Author: Andrii Tkach <at...@hortonworks.com>
Authored: Wed Jan 27 15:35:40 2016 +0200
Committer: Andrii Tkach <at...@hortonworks.com>
Committed: Wed Jan 27 15:35:40 2016 +0200

----------------------------------------------------------------------
 ambari-web/app/views/common/quick_view_link_view.js  | 3 +++
 ambari-web/test/views/common/quick_link_view_test.js | 5 +++++
 2 files changed, 8 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/54311b78/ambari-web/app/views/common/quick_view_link_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/quick_view_link_view.js b/ambari-web/app/views/common/quick_view_link_view.js
index 5b26201..c2f7dcc 100644
--- a/ambari-web/app/views/common/quick_view_link_view.js
+++ b/ambari-web/app/views/common/quick_view_link_view.js
@@ -538,6 +538,9 @@ App.QuickViewLinks = Em.View.extend({
       case "ATLAS":
         hosts = this.findHosts('ATLAS_SERVER', response);
         break;
+      case "MAPREDUCE2":
+        hosts = this.findHosts('HISTORYSERVER', response);
+        break;
       default:
         if (this.getWithDefault('content.hostComponents', []).someProperty('isMaster')) {
           hosts = this.findHosts(this.get('content.hostComponents').findProperty('isMaster').get('componentName'), response);

http://git-wip-us.apache.org/repos/asf/ambari/blob/54311b78/ambari-web/test/views/common/quick_link_view_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/common/quick_link_view_test.js b/ambari-web/test/views/common/quick_link_view_test.js
index f619e87..93a235c 100644
--- a/ambari-web/test/views/common/quick_link_view_test.js
+++ b/ambari-web/test/views/common/quick_link_view_test.js
@@ -945,6 +945,11 @@ describe('App.QuickViewLinks', function () {
       expect(quickViewLinks.findHosts.calledWith('ATLAS_SERVER', {})).to.be.true;
     });
 
+    it("MAPREDUCE2 service", function() {
+      expect(quickViewLinks.getHosts({}, 'MAPREDUCE2')).to.eql(['host1']);
+      expect(quickViewLinks.findHosts.calledWith('HISTORYSERVER', {})).to.be.true;
+    });
+
     it("custom service without master", function() {
       expect(quickViewLinks.getHosts({}, 'S1')).to.be.empty;
     });


[38/39] ambari git commit: AMBARI-14847 - Concurrent kinit Commands Cause Alerts To Randomly Trigger (jonathanhurley)

Posted by nc...@apache.org.
AMBARI-14847 - Concurrent kinit Commands Cause Alerts To Randomly Trigger (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3ab6a3a8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3ab6a3a8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3ab6a3a8

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 3ab6a3a8365b31417f8afd83b668d2986702246b
Parents: d8804cf
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Fri Jan 29 14:19:06 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Mon Feb 1 10:09:08 2016 -0500

----------------------------------------------------------------------
 .../resource_management/core/global_lock.py     | 46 ++++++++++++++++
 .../libraries/functions/curl_krb_request.py     | 47 ++++++++++-------
 .../libraries/functions/hive_check.py           | 23 ++++----
 .../package/alerts/alert_hive_metastore.py      | 15 ++++--
 .../package/alerts/alert_webhcat_server.py      |  6 ---
 .../package/alerts/alert_check_oozie_server.py  | 15 ++++--
 ambari-server/src/test/python/TestGlobalLock.py | 55 ++++++++++++++++++++
 7 files changed, 165 insertions(+), 42 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3ab6a3a8/ambari-common/src/main/python/resource_management/core/global_lock.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/global_lock.py b/ambari-common/src/main/python/resource_management/core/global_lock.py
new file mode 100644
index 0000000..72904c8
--- /dev/null
+++ b/ambari-common/src/main/python/resource_management/core/global_lock.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+import threading
+from resource_management.core.exceptions import Fail
+
+# concurrent kinit's can cause the following error:
+# Internal credentials cache error while storing credentials while getting initial credentials
+LOCK_TYPE_KERBEROS = "KERBEROS_LOCK"
+
+# dictionary of all global lock instances
+__GLOBAL_LOCKS = {
+  LOCK_TYPE_KERBEROS : threading.RLock()
+}
+
+def get_lock(lock_type):
+  """
+  Gets the global lock associated with the specified type. This does not actually acquire
+  the lock, it simply returns the RLock instance. It is up to the caller to invoke RLock.acquire()
+  and RLock.release() correctly.
+  :param lock_type:
+  :return: a global threading.RLock() instance
+  :rtype: threading.RLock()
+  """
+  if lock_type not in __GLOBAL_LOCKS:
+    raise Fail("There is no global lock associated with {0}".format(str(lock_type)))
+
+  return __GLOBAL_LOCKS[lock_type]
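
For reference, the callers changed in the hunks below all follow the same acquire/release pattern around kinit. A minimal sketch of that pattern (assuming ambari-common's resource_management package is on the Python path; the helper name kinit_serialized and the command string are illustrative only, not part of the commit):

  from resource_management.core import global_lock
  from resource_management.core import shell

  def kinit_serialized(kinit_cmd, user):
    # Serialize kinit across alert threads; concurrent kinit runs can corrupt
    # the credentials cache ("Internal credentials cache error while storing
    # credentials while getting initial credentials").
    kinit_lock = global_lock.get_lock(global_lock.LOCK_TYPE_KERBEROS)
    kinit_lock.acquire()
    try:
      # RLock is reentrant, so a nested call from the same thread will not deadlock.
      shell.checked_call(kinit_cmd, user=user)
    finally:
      kinit_lock.release()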

http://git-wip-us.apache.org/repos/asf/ambari/blob/3ab6a3a8/ambari-common/src/main/python/resource_management/libraries/functions/curl_krb_request.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/curl_krb_request.py b/ambari-common/src/main/python/resource_management/libraries/functions/curl_krb_request.py
index b42a8a3..1ccc45f 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/curl_krb_request.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/curl_krb_request.py
@@ -24,13 +24,15 @@ __all__ = ["curl_krb_request"]
 import logging
 import os
 import time
-import subprocess
+import threading
 
+from resource_management.core import global_lock
 from resource_management.core import shell
 from resource_management.core.exceptions import Fail
 from get_kinit_path import get_kinit_path
 from get_klist_path import get_klist_path
 from resource_management.libraries.functions.get_user_call_output import get_user_call_output
+
 # hashlib is supplied as of Python 2.5 as the replacement interface for md5
 # and other secure hashes.  In 2.6, md5 is deprecated.  Import hashlib if
 # available, avoiding a deprecation warning under 2.6.  Import md5 otherwise,
@@ -47,7 +49,6 @@ MAX_TIMEOUT_DEFAULT = CONNECTION_TIMEOUT_DEFAULT + 2
 
 logger = logging.getLogger()
 
-
 def curl_krb_request(tmp_dir, keytab, principal, url, cache_file_prefix,
     krb_exec_search_paths, return_only_http_code, alert_name, user,
     connection_timeout = CONNECTION_TIMEOUT_DEFAULT):
@@ -62,25 +63,33 @@ def curl_krb_request(tmp_dir, keytab, principal, url, cache_file_prefix,
   ccache_file_path = "{0}{1}{2}_{3}_cc_{4}".format(tmp_dir, os.sep, cache_file_prefix, user, ccache_file_name)
   kerberos_env = {'KRB5CCNAME': ccache_file_path}
 
-  # If there are no tickets in the cache or they are expired, perform a kinit, else use what
-  # is in the cache
-  if krb_exec_search_paths:
-    klist_path_local = get_klist_path(krb_exec_search_paths)
-  else:
-    klist_path_local = get_klist_path()
-
-  if shell.call("{0} -s {1}".format(klist_path_local, ccache_file_path), user=user)[0] != 0:
+  # concurrent kinit's can cause the following error:
+  # Internal credentials cache error while storing credentials while getting initial credentials
+  kinit_lock = global_lock.get_lock(global_lock.LOCK_TYPE_KERBEROS)
+  kinit_lock.acquire()
+  try:
+    # If there are no tickets in the cache or they are expired, perform a kinit, else use what
+    # is in the cache
     if krb_exec_search_paths:
-      kinit_path_local = get_kinit_path(krb_exec_search_paths)
+      klist_path_local = get_klist_path(krb_exec_search_paths)
     else:
-      kinit_path_local = get_kinit_path()
-    logger.debug("[Alert][{0}] Enabling Kerberos authentication via GSSAPI using ccache at {1}.".format(
-      alert_name, ccache_file_path))
-
-    shell.checked_call("{0} -l 5m -c {1} -kt {2} {3} > /dev/null".format(kinit_path_local, ccache_file_path, keytab, principal), user=user)
-  else:
-    logger.debug("[Alert][{0}] Kerberos authentication via GSSAPI already enabled using ccache at {1}.".format(
-      alert_name, ccache_file_path))
+      klist_path_local = get_klist_path()
+
+    if shell.call("{0} -s {1}".format(klist_path_local, ccache_file_path), user=user)[0] != 0:
+      if krb_exec_search_paths:
+        kinit_path_local = get_kinit_path(krb_exec_search_paths)
+      else:
+        kinit_path_local = get_kinit_path()
+
+      logger.debug("[Alert][{0}] Enabling Kerberos authentication via GSSAPI using ccache at {1}.".format(
+        alert_name, ccache_file_path))
+
+      shell.checked_call("{0} -l 5m -c {1} -kt {2} {3} > /dev/null".format(kinit_path_local, ccache_file_path, keytab, principal), user=user)
+    else:
+      logger.debug("[Alert][{0}] Kerberos authentication via GSSAPI already enabled using ccache at {1}.".format(
+        alert_name, ccache_file_path))
+  finally:
+    kinit_lock.release()
 
   # check if cookies dir exists, if not then create it
   cookies_dir = os.path.join(tmp_dir, "cookies")
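
With the klist/kinit sequence now wrapped in the Kerberos lock inside curl_krb_request itself, alert scripts keep calling the function as before. A hedged example of such a call, using the signature shown above (all argument values are placeholders, not taken from the commit):

  from resource_management.libraries.functions.curl_krb_request import curl_krb_request

  # Placeholder values; real alerts read these from the cluster configurations.
  result = curl_krb_request(
    "/var/lib/ambari-agent/tmp",                        # tmp_dir for ccache/cookie files
    "/etc/security/keytabs/smokeuser.headless.keytab",  # keytab
    "ambari-qa@EXAMPLE.COM",                            # principal
    "http://namenode.example.com:50070/jmx",            # url to probe
    "alert_example",                                    # cache_file_prefix
    None,                                               # krb_exec_search_paths
    False,                                              # return_only_http_code
    "example_alert",                                    # alert_name
    "ambari-qa")                                        # user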

http://git-wip-us.apache.org/repos/asf/ambari/blob/3ab6a3a8/ambari-common/src/main/python/resource_management/libraries/functions/hive_check.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/hive_check.py b/ambari-common/src/main/python/resource_management/libraries/functions/hive_check.py
index aacb176..ebcf4f9 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/hive_check.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/hive_check.py
@@ -18,9 +18,7 @@ See the License for the specific language governing permissions and
 limitations under the License.
 '''
 
-import socket
-
-from resource_management.core.exceptions import Fail
+from resource_management.core import global_lock
 from resource_management.core.resources import Execute
 from resource_management.libraries.functions import format
 
@@ -55,15 +53,22 @@ def check_thrift_port_sasl(address, port, hive_auth="NOSASL", key=None, kinitcmd
   if ssl and ssl_keystore is not None and ssl_password is not None:
     beeline_url.extend(['ssl={ssl_str}', 'sslTrustStore={ssl_keystore}', 'trustStorePassword={ssl_password!p}'])
 
-  # append url according to kerberos setting
+  # append url according to principal and execute kinit
   if kinitcmd:
     beeline_url.append('principal={key}')
-    Execute(kinitcmd, user=smokeuser)
+
+    # prevent concurrent kinit
+    kinit_lock = global_lock.get_lock(global_lock.LOCK_TYPE_KERBEROS)
+    kinit_lock.acquire()
+    try:
+      Execute(kinitcmd, user=smokeuser)
+    finally:
+      kinit_lock.release()
 
   cmd = "! beeline -u '%s' -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL'" % \
         format(";".join(beeline_url))
+
   Execute(cmd,
-          user=smokeuser,
-          path=["/bin/", "/usr/bin/", "/usr/lib/hive/bin/", "/usr/sbin/"],
-          timeout=check_command_timeout
-  )
+    user=smokeuser,
+    path=["/bin/", "/usr/bin/", "/usr/lib/hive/bin/", "/usr/sbin/"],
+    timeout=check_command_timeout)

http://git-wip-us.apache.org/repos/asf/ambari/blob/3ab6a3a8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_metastore.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_metastore.py
index dbf0600..42485b6 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_metastore.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_metastore.py
@@ -24,6 +24,7 @@ import time
 import traceback
 import logging
 
+from resource_management.core import global_lock
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.core.resources import Execute
@@ -145,13 +146,19 @@ def execute(configurations={}, parameters={}, host_name=None):
         kerberos_executable_search_paths = configurations[KERBEROS_EXECUTABLE_SEARCH_PATHS_KEY]
       else:
         kerberos_executable_search_paths = None
-             
+
       kinit_path_local = get_kinit_path(kerberos_executable_search_paths)
       kinitcmd=format("{kinit_path_local} -kt {smokeuser_keytab} {smokeuser_principal}; ")
 
-      Execute(kinitcmd, user=smokeuser,
-        path=["/bin/", "/usr/bin/", "/usr/lib/hive/bin/", "/usr/sbin/"],
-        timeout=10)
+      # prevent concurrent kinit
+      kinit_lock = global_lock.get_lock(global_lock.LOCK_TYPE_KERBEROS)
+      kinit_lock.acquire()
+      try:
+        Execute(kinitcmd, user=smokeuser,
+          path=["/bin/", "/usr/bin/", "/usr/lib/hive/bin/", "/usr/sbin/"],
+          timeout=10)
+      finally:
+        kinit_lock.release()
 
     if host_name is None:
       host_name = socket.getfqdn()

http://git-wip-us.apache.org/repos/asf/ambari/blob/3ab6a3a8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_webhcat_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_webhcat_server.py
index 1e95703..b49fd6e 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_webhcat_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_webhcat_server.py
@@ -26,13 +26,7 @@ import traceback
 import logging
 
 from resource_management.core.environment import Environment
-from resource_management.core.resources import Execute
-from resource_management.core import shell
-from resource_management.libraries.functions import format
-from resource_management.libraries.functions import get_kinit_path
-from resource_management.libraries.functions import get_klist_path
 from resource_management.libraries.functions.curl_krb_request import curl_krb_request
-from os import getpid, sep
 
 RESULT_CODE_OK = "OK"
 RESULT_CODE_CRITICAL = "CRITICAL"

http://git-wip-us.apache.org/repos/asf/ambari/blob/3ab6a3a8/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/alerts/alert_check_oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/alerts/alert_check_oozie_server.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/alerts/alert_check_oozie_server.py
index fcc2d49..90851c8 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/alerts/alert_check_oozie_server.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/alerts/alert_check_oozie_server.py
@@ -17,17 +17,18 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 """
+import os
+import re
+
+from resource_management.core import global_lock
 from resource_management.core.environment import Environment
 from resource_management.core.resources import Execute
-from resource_management.core.shell import call
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions import get_klist_path
 from ambari_commons.os_check import OSConst, OSCheck
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from urlparse import urlparse
-import os
-import re
 
 RESULT_CODE_OK = 'OK'
 RESULT_CODE_CRITICAL = 'CRITICAL'
@@ -143,7 +144,13 @@ def get_check_command(oozie_url, host_name, configurations, parameters, only_kin
     else:
       kinit_command = "{0} -s {1} || ".format(klist_path_local, ccache_file) + kinit_part_command
 
-    Execute(kinit_command, environment=kerberos_env, user=user)
+    # prevent concurrent kinit
+    kinit_lock = global_lock.get_lock(global_lock.LOCK_TYPE_KERBEROS)
+    kinit_lock.acquire()
+    try:
+      Execute(kinit_command, environment=kerberos_env, user=user)
+    finally:
+      kinit_lock.release()
 
   # oozie configuration directory uses a symlink when > HDP 2.2
   oozie_config_directory = OOZIE_CONF_DIR_LEGACY

http://git-wip-us.apache.org/repos/asf/ambari/blob/3ab6a3a8/ambari-server/src/test/python/TestGlobalLock.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestGlobalLock.py b/ambari-server/src/test/python/TestGlobalLock.py
new file mode 100644
index 0000000..3851de6
--- /dev/null
+++ b/ambari-server/src/test/python/TestGlobalLock.py
@@ -0,0 +1,55 @@
+# !/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from resource_management.core import global_lock
+from resource_management.core.exceptions import Fail
+
+from unittest import TestCase
+
+utils = __import__('ambari_server.utils').utils
+
+class TestGlobalLock(TestCase):
+  def test_get_invalid_lock(self):
+    """
+    Tests that an invalid lock throws an exception
+    :return:
+    """
+    try:
+      global_lock.get_lock("INVALID")
+      self.fail("Expected an exception when trying to retrieve an invalid lock")
+    except Fail:
+      pass
+
+  def test_get_kerberos_lock(self):
+    """
+    Tests that the kerberos lock can be retrieved.
+    :return:
+    """
+    kerberos_lock = global_lock.get_lock(global_lock.LOCK_TYPE_KERBEROS)
+    self.assertFalse(kerberos_lock is None)
+
+    kerberos_lock_2 = global_lock.get_lock(global_lock.LOCK_TYPE_KERBEROS)
+    self.assertEqual(kerberos_lock, kerberos_lock_2)
+
+    kerberos_lock.acquire()
+    kerberos_lock.release()
+
+    kerberos_lock_2.acquire()
+    kerberos_lock_2.release()
\ No newline at end of file


[19/39] ambari git commit: AMBARI-14794: Stop HAWQ Cluster Immediate command should be disabled if HAWQMASTER is stopped (adenissov via jaoki)

Posted by nc...@apache.org.
AMBARI-14794: Stop HAWQ Cluster Immediate command should be disabled if HAWQMASTER is stopped (adenissov via jaoki)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d6346993
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d6346993
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d6346993

Branch: refs/heads/branch-dev-patch-upgrade
Commit: d63469935d0933af5e625cfea1d581f056055685
Parents: b037ef5
Author: Jun Aoki <ja...@apache.org>
Authored: Thu Jan 28 17:42:04 2016 -0800
Committer: Jun Aoki <ja...@apache.org>
Committed: Thu Jan 28 17:42:04 2016 -0800

----------------------------------------------------------------------
 ambari-web/app/models/host_component.js | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d6346993/ambari-web/app/models/host_component.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/host_component.js b/ambari-web/app/models/host_component.js
index 77395f2..b205d38 100644
--- a/ambari-web/app/models/host_component.js
+++ b/ambari-web/app/models/host_component.js
@@ -269,6 +269,7 @@ App.HostComponentActionMap = {
     var NN = ctx.get('controller.content.hostComponents').findProperty('componentName', 'NAMENODE');
     var RM = ctx.get('controller.content.hostComponents').findProperty('componentName', 'RESOURCEMANAGER');
     var RA = ctx.get('controller.content.hostComponents').findProperty('componentName', 'RANGER_ADMIN');
+    var HM = ctx.get('controller.content.hostComponents').findProperty('componentName', 'HAWQMASTER');
     var HS = ctx.get('controller.content.hostComponents').findProperty('componentName', 'HAWQSTANDBY');
     return {
       RESTART_ALL: {
@@ -378,7 +379,7 @@ App.HostComponentActionMap = {
         context: Em.I18n.t('services.service.actions.run.immediateStopHawqCluster.context'),
         label: Em.I18n.t('services.service.actions.run.immediateStopHawqCluster.context'),
         cssClass: 'icon-stop',
-        disabled: false
+        disabled: !HM || HM.get('workStatus') != App.HostComponentStatus.started
       },
       IMMEDIATE_STOP: {
         customCommand: 'IMMEDIATE_STOP',


[07/39] ambari git commit: AMBARI-14818. Incorrect markup of some of Accumulo's metrics widgets (alexantonenko)

Posted by nc...@apache.org.
AMBARI-14818. Incorrect markup of some of Accumulo's metrics widgets (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/75be3b97
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/75be3b97
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/75be3b97

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 75be3b97a7ca771f7d2d74aea92d3c017c354159
Parents: 646fb42
Author: Alex Antonenko <hi...@gmail.com>
Authored: Wed Jan 27 19:33:51 2016 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Thu Jan 28 02:40:06 2016 +0200

----------------------------------------------------------------------
 .../main/resources/stacks/HDP/2.3/services/ACCUMULO/widgets.json | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/75be3b97/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/widgets.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/widgets.json b/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/widgets.json
index 8bdbc22..fd919e8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/widgets.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/widgets.json
@@ -83,7 +83,7 @@
             }
           ],
           "properties": {
-            "display_unit": "compactions",
+            "display_unit": "MinCs",
             "graph_type": "LINE",
             "time_range": "1"
           }
@@ -118,7 +118,7 @@
             }
           ],
           "properties": {
-            "display_unit": "compactions",
+            "display_unit": "MajCs",
             "graph_type": "LINE",
             "time_range": "1"
           }


[35/39] ambari git commit: AMBARI-14861. [Ambari tarballs] Package ambari-server to tar.gz (make common definition for deb, rpm, tar.gz) (aonishuk)

Posted by nc...@apache.org.
AMBARI-14861. [Ambari tarballs] Package ambari-server to tar.gz (make common definition for deb, rpm, tar.gz) (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2fd458e7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2fd458e7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2fd458e7

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 2fd458e720c3d52aa4bebc82e4229dad721f9ce0
Parents: 60db82e
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Mon Feb 1 15:57:02 2016 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Mon Feb 1 15:57:02 2016 +0200

----------------------------------------------------------------------
 ambari-server/pom.xml                        | 883 +---------------------
 ambari-server/src/main/assemblies/server.xml | 348 +++++++--
 2 files changed, 304 insertions(+), 927 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2fd458e7/ambari-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-server/pom.xml b/ambari-server/pom.xml
index 9b35c46..c601baa 100644
--- a/ambari-server/pom.xml
+++ b/ambari-server/pom.xml
@@ -268,7 +268,7 @@
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>rpm-maven-plugin</artifactId>
-        <version>2.0.1</version>
+        <version>2.1.4</version>
         <executions>
           <execution>
             <!-- unbinds rpm creation from maven lifecycle -->
@@ -305,406 +305,36 @@
             <scriptFile>src/main/package/rpm/posttrans_server.sh</scriptFile>
             <fileEncoding>utf-8</fileEncoding>
           </posttransScriptlet>
-          <defaultFilemode>644</defaultFilemode>
-          <defaultDirmode>755</defaultDirmode>
-          <defaultUsername>root</defaultUsername>
-          <defaultGroupname>root</defaultGroupname>
           <needarch>x86_64</needarch>
           <mappings>
             <mapping>
-              <directory>/usr/lib/ambari-server</directory>
-              <dependency>
-              </dependency>
-            </mapping>
-            <mapping>
-              <directory>/usr/lib/ambari-server/web</directory>
-              <sources>
-                <source>
-                  <location>${ambari-web-dir}</location>
-                  <includes>
-                    <include>**</include>
-                  </includes>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/usr/lib/ambari-server</directory>
-              <sources>
-                <source>
-                  <location>${project.build.directory}/${project.artifactId}-${project.version}.jar</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>${ambari_commons.install.dir}</directory>
-              <sources>
-                <source>
-                  <location>
-                    ${project.basedir}/../ambari-common/src/main/python/ambari_commons
-                  </location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>${resource_management.install.dir}</directory>
-              <sources>
-                <source>
-                  <location>
-                    ${resourceManagementSrcLocation}
-                  </location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>${jinja.install.dir}</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2</location>
-                  <excludes>
-                    <exclude>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2/testsuite</exclude>
-                  </excludes>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>${simplejson.install.dir}</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>${project.basedir}/../ambari-common/src/main/python/ambari_simplejson</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/usr/sbin</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <directoryIncluded>false</directoryIncluded>
-              <sources>
-                <source>
-                  <location>src/main/python/ambari-server.py</location>
-                </source>
-                <source>
-                  <location>src/main/python/ambari_server_main.py</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/etc/init.d</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <directoryIncluded>false</directoryIncluded>
-              <sources>
-                <source>
-                  <location>sbin/ambari-server</location>
-                  <filter>true</filter>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <directoryIncluded>false</directoryIncluded>
-              <sources>
-                <source>
-                  <location>../ambari-common/src/main/unix/ambari-python-wrap</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/etc/ambari-server/conf</directory>
-              <configuration>true</configuration>
-              <sources>
-                <source>
-                  <location>${ambariProperties}</location>
-                </source>
-                <source>
-                  <location>conf/unix/log4j.properties</location>
-                </source>
-                <source>
-                  <location>conf/unix/krb5JAASLogin.conf</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/</directory>
-              <configuration>true</configuration>
-              <filemode>700</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>conf/unix/ambari-env.sh</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/</directory>
-              <filemode>700</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>conf/unix/ambari-sudo.sh</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/</directory>
-              <filemode>700</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>conf/unix/install-helper.sh</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/keys</directory>
-              <sources>
-                <source>
-                  <location>conf/unix/ca.config</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/keys/db</directory>
-              <filemode>700</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>src/main/resources/db</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/run/ambari-server/bootstrap</directory>
-            </mapping>
-            <mapping>
-              <directory>/var/run/ambari-server/stack-recommendations</directory>
-            </mapping>
-            <mapping>
-              <directory>/var/log/ambari-server</directory>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources</directory>
-              <sources>
-                <source>
-                  <location>target/classes/Ambari-DDL-Postgres-CREATE.sql</location>
-                </source>
-                <source>
-                  <location>src/main/resources/Ambari-DDL-Postgres-DROP.sql</location>
-                </source>
-                <source>
-                  <location>target/classes/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql</location>
-                </source>
-                <source>
-                  <location>src/main/resources/Ambari-DDL-Postgres-EMBEDDED-DROP.sql</location>
-                </source>
-                <source>
-                  <location>target/classes/Ambari-DDL-Oracle-CREATE.sql</location>
-                </source>
-                <source>
-                  <location>target/classes/Ambari-DDL-MySQL-CREATE.sql</location>
-                </source>
-                <source>
-                  <location>src/main/resources/Ambari-DDL-Oracle-DROP.sql</location>
-                </source>
-                <source>
-                  <location>src/main/resources/Ambari-DDL-MySQL-DROP.sql</location>
-                </source>
-                <source>
-                  <location>target/classes/Ambari-DDL-SQLServer-CREATE.sql</location>
-                </source>
-                <source>
-                  <location>target/classes/Ambari-DDL-SQLServer-CREATELOCAL.sql</location>
-                </source>
-                <source>
-                  <location>src/main/resources/Ambari-DDL-SQLServer-DROP.sql</location>
-                </source>
-                <source>
-                  <location>target/classes/Ambari-DDL-SQLAnywhere-CREATE.sql</location>
-                </source>
-                <source>
-                  <location>src/main/resources/Ambari-DDL-SQLAnywhere-DROP.sql</location>
-                </source>
-                <source>
-                  <location>${project.build.directory}/DBConnectionVerification.jar</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/data/tmp</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/data/cache</directory>
-              <filemode>700</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/apps</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>src/main/resources/slider_resources/README.txt</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/scripts</directory>
-              <filemode>755</filemode>
-              <sources>
-                <source>
-                  <location>src/main/resources/scripts</location>
-                </source>
-                <source>
-                  <location>src/main/python/upgradeHelper.py</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/views</directory>
-              <filemode>755</filemode>
-              <sources>
-                <source>
-                  <location>${ambari-admin-dir}/target</location>
-                  <includes>
-                    <include>*.jar</include>
-                  </includes>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/upgrade</directory>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/upgrade/ddl</directory>
-              <sources>
-                <source>
-                  <location>src/main/resources/upgrade/ddl</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/upgrade/dml</directory>
-              <sources>
-                <source>
-                  <location>src/main/resources/upgrade/dml</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/common-services</directory>
-              <sources>
-                <source>
-                  <location>${commonServicesSrcLocation}</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/upgrade/catalog</directory>
-              <sources>
-                <source>
-                  <location>src/main/resources/upgrade/catalog</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/stacks/${stack.distribution}</directory>
-              <sources>
-                <source>
-                  <location>${stacksSrcLocation}</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/stacks</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>target/classes/stacks/stack_advisor.py</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/usr/lib/python2.6/site-packages/ambari_server</directory>
-              <filemode>755</filemode>
+              <directory>/etc</directory>
               <username>root</username>
               <groupname>root</groupname>
+              <directoryIncluded>false</directoryIncluded> <!-- avoid managing /etc/init.d -->
               <sources>
                 <source>
-                  <location>src/main/python/ambari_server</location>
-                </source>
-                <source>
-                  <location>src/main/python/bootstrap.py</location>
-                </source>
-                <source>
-                  <location>src/main/python/setupAgent.py</location>
-                </source>
-                <source>
-                  <location>src/main/python/os_check_type.py</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/run/ambari-server</directory>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources</directory>
-              <sources>
-                <source>
-                  <location>../version</location>
-                  <filter>true</filter>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/custom_action_definitions</directory>
-              <sources>
-                <source>
-                  <location>src/main/resources/custom_action_definitions</location>
+                  <location>${project.build.directory}${dirsep}${project.artifactId}-${project.version}-dist/etc</location>
                 </source>
               </sources>
             </mapping>
-            <mapping>
-              <!-- custom actions root-->
-              <directory>/var/lib/ambari-server/resources/custom_actions</directory>
-              <filemode>755</filemode>
+			<mapping>
+              <directory>/usr</directory>
               <username>root</username>
               <groupname>root</groupname>
               <sources>
                 <source>
-                  <location>${customActionsRoot}</location>
+                  <location>${project.build.directory}${dirsep}${project.artifactId}-${project.version}-dist/usr</location>
                 </source>
               </sources>
             </mapping>
             <mapping>
-              <directory>/var/lib/ambari-server/resources/host_scripts</directory>
-              <filemode>755</filemode>
+              <directory>/var</directory>
               <username>root</username>
               <groupname>root</groupname>
               <sources>
                 <source>
-                  <location>src/main/resources/host_scripts</location>
+                  <location>${project.build.directory}${dirsep}${project.artifactId}-${project.version}-dist/var</location>
                 </source>
               </sources>
             </mapping>
@@ -714,7 +344,7 @@
       <plugin>
         <groupId>org.vafer</groupId>
         <artifactId>jdeb</artifactId>
-        <version>1.0.1</version>
+        <version>1.4</version>
         <executions>
           <execution>
             <!-- unbinds rpm creation from maven lifecycle -->
@@ -726,498 +356,15 @@
         </executions>
         <configuration>
           <controlDir>${basedir}/src/main/package/deb/control</controlDir>
-          <deb>${basedir}/target/${project.artifactId}_${package-version}-${package-release}.deb</deb>
+          <deb>${basedir}/target/${project.artifactId}_${package-version}-${package-release}-dist.deb</deb>
+          <skip>false</skip>
+          <skipPOMs>false</skipPOMs>
           <dataSet>
             <data>
-              <type>template</type>
-              <paths>
-                <path>/usr/lib/ambari-server</path>
-                <path>/var/run/ambari-server</path>
-                <path>/var/run/ambari-server/bootstrap</path>
-                <path>/var/run/ambari-server/stack-recommendations</path>
-                <path>/var/log/ambari-server</path>
-                <path>/var/lib/ambari-server/resources/upgrade</path>
-                <path>/var/lib/ambari-server/data/tmp</path>
-                <path>/var/lib/ambari-server/data/cache</path>
-              </paths>
-            </data>
-            <!-- TODO: should be included all subdirs, if exists-->
-            <data>
-              <src>${basedir}/../ambari-web/public</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/usr/lib/ambari-server/web</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>${project.build.directory}/${project.artifactId}-${project.version}-dist/${project.artifactId}-${project.version}/lib</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/usr/lib</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/python/ambari-server.py</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/usr/sbin</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/python/ambari_server_main.py</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/usr/sbin</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/python/ambari-server.py</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/usr/sbin</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>../ambari-common/src/main/unix/ambari-python-wrap</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>${basedir}/target/ambari-server</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/etc/init.d/</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>${ambariProperties}</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/etc/ambari-server/conf</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/log4j.properties</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/etc/ambari-server/conf</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/krb5JAASLogin.conf</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/etc/ambari-server/conf</prefix>
-              </mapper>
-            </data>
-             <!-- /q001 -->
-            <data>
-              <src>conf/unix/ambari-env.sh</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>700</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/ambari-sudo.sh</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>700</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/install-helper.sh</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>700</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/slider_resources/README.txt</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/apps/</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/ca.config</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/keys</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/db</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/keys/db</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>700</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>target/classes/Ambari-DDL-Postgres-CREATE.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/Ambari-DDL-Postgres-DROP.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>target/classes/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/Ambari-DDL-Postgres-EMBEDDED-DROP.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>target/classes/Ambari-DDL-Oracle-CREATE.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>target/classes/Ambari-DDL-MySQL-CREATE.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/Ambari-DDL-Oracle-DROP.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/Ambari-DDL-MySQL-DROP.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>target/classes/Ambari-DDL-SQLServer-CREATE.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>target/classes/Ambari-DDL-SQLServer-CREATELOCAL.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/Ambari-DDL-SQLServer-DROP.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>target/classes/Ambari-DDL-SQLAnywhere-CREATE.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/Ambari-DDL-SQLAnywhere-DROP.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>${project.build.directory}/DBConnectionVerification.jar</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/scripts</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/scripts</prefix>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>${ambari-admin-dir}/target</src>
-              <type>directory</type>
-              <includes>*.jar</includes>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/views</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/python/upgradeHelper.py</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/scripts</prefix>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/upgrade/ddl</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/upgrade/ddl</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/upgrade/dml</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/upgrade/dml</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>${commonServicesSrcLocation}</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/common-services</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/upgrade/catalog</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/upgrade/catalog</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>${stacksSrcLocation}</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/stacks/${stack.distribution}</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>target/classes/stacks/stack_advisor.py</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/stacks</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/python/ambari_server</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/usr/lib/python2.6/site-packages/ambari_server</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/python/bootstrap.py</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/usr/lib/python2.6/site-packages/ambari_server</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/python/setupAgent.py</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/usr/lib/python2.6/site-packages/ambari_server</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/python/os_check_type.py</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/usr/lib/python2.6/site-packages/ambari_server</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>${basedir}/target/version</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/custom_action_definitions</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/custom_action_definitions</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>${customActionsRoot}</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/custom_actions</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/host_scripts</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/host_scripts</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>          
-            <data>
-              <src>
-                ${project.basedir}/../ambari-common/src/main/python/ambari_commons
-              </src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>${ambari_commons.install.dir}</prefix>
-                <filemode>755</filemode>
-                <user>root</user>
-                <group>root</group>
-              </mapper>
-            </data>
-            <data>
-              <src>
-                ${resourceManagementSrcLocation}
-              </src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>${resource_management.install.dir}</prefix>
-                <filemode>755</filemode>
-                <user>root</user>
-                <group>root</group>
-              </mapper>
-            </data>
-            <data>
-              <src>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2</src>
-              <excludes>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2/testsuite</excludes>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>${jinja.install.dir}</prefix>
-                <filemode>755</filemode>
-                <user>root</user>
-                <group>root</group>
-              </mapper>
-            </data>
-            <data>
-              <src>${project.basedir}/../ambari-common/src/main/python/ambari_simplejson</src>
-              <type>directory</type>
+              <src>${project.build.directory}/${project.artifactId}-${project.version}-dist.tar.gz</src>
+              <type>archive</type>
               <mapper>
                 <type>perm</type>
-                <prefix>${simplejson.install.dir}</prefix>
-                <filemode>755</filemode>
                 <user>root</user>
                 <group>root</group>
               </mapper>

http://git-wip-us.apache.org/repos/asf/ambari/blob/2fd458e7/ambari-server/src/main/assemblies/server.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/assemblies/server.xml b/ambari-server/src/main/assemblies/server.xml
index 0c88810..c24bce6 100644
--- a/ambari-server/src/main/assemblies/server.xml
+++ b/ambari-server/src/main/assemblies/server.xml
@@ -23,85 +23,315 @@
     <format>tar.gz</format>
   </formats>
   <includeBaseDirectory>false</includeBaseDirectory>
-  <files>
-    <file>
-      <source>${project.build.directory}/${artifact.artifactId}-${artifact.version}.jar</source>
-      <outputDirectory>ambari-server-${project.version}/lib/ambari-server</outputDirectory>
-    </file>
-   <file>
-      <source>${basedir}/src/main/python/ambari-server.py</source>
-      <outputDirectory>/ambari-server-${project.version}/sbin</outputDirectory>
-    </file>
-    <file>
-      <source>${basedir}/src/main/python/bootstrap.py</source>
-      <outputDirectory>/ambari-server-${project.version}/sbin</outputDirectory>
-    </file>
-    <file>
-      <source>${basedir}/src/main/python/setupAgent.py</source>
-      <outputDirectory>/ambari-server-${project.version}/sbin</outputDirectory>
-    </file>
-  </files>
+  <!-- File sets. Syntax:
+	  <fileSets>
+	    <fileSet>
+	      <useDefaultExcludes/>
+	      <outputDirectory/>
+	      <includes/>
+	      <excludes/>
+	      <fileMode/>
+	      <directoryMode/>
+	      <directory/>
+	      <lineEnding/>
+	      <filtered/>
+	    </fileSet>
+	  </fileSets>
+  -->
   <fileSets>
-    <!-- Distro files, readme, licenses, etc -->
     <fileSet>
-      <directory>${basedir}/../</directory>
-      <outputDirectory>ambari-server-${project.version}/</outputDirectory>
-      <includes>
-        <include>*.txt</include>
-      </includes>
+      <directory>${ambari-web-dir}</directory>
+      <outputDirectory>/usr/lib/ambari-server/web</outputDirectory>
     </fileSet>
-     <!--
     <fileSet>
-      <directory>${project.build.directory}/web/</directory>
-      <outputDirectory>ambari-server-${project.version}/web/</outputDirectory>
-      <includes>
-        <include>*</include>
-      </includes>
+      <directory>${project.basedir}/../ambari-common/src/main/python/ambari_commons</directory>
+      <outputDirectory>${ambari_commons.install.dir}</outputDirectory>
     </fileSet>
-    -->
-    <!--
     <fileSet>
-      <directory>${basedir}/src/main/bin</directory>
-      <outputDirectory>ambari-server-${project.version}/bin</outputDirectory>
-      <includes>
-        <include>*</include>
-      </includes>
-      <fileMode>0755</fileMode>
+      <directory>${resourceManagementSrcLocation}</directory>
+      <outputDirectory>${resource_management.install.dir}</outputDirectory>
     </fileSet>
-    -->
     <fileSet>
-      <directory>${basedir}/src/main/resources/</directory>
-      <outputDirectory>/ambari-server-${project.version}/keystore</outputDirectory>
-      <includes>
-        <include>db/*</include>
-        <include>ca.config</include>
-        <include>pass.txt</include>
-      </includes>
+      <directory>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2</directory>
+      <outputDirectory>${jinja.install.dir}</outputDirectory>
+      <excludes>
+      	<exclude>**/testsuite/**</exclude>
+      </excludes>
     </fileSet>
     <fileSet>
-      <directory>${basedir}/../ambari-web/public</directory>
-      <outputDirectory>ambari-server-${project.version}/web</outputDirectory>
-      <includes>
-        <include>**</include>
-      </includes>
+      <directory>${project.basedir}/../ambari-common/src/main/python/ambari_simplejson</directory>
+      <outputDirectory>${simplejson.install.dir}</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <fileMode>700</fileMode>
+      <directory>src/main/resources/db</directory>
+      <outputDirectory>/var/lib/ambari-server/keys/db</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>/var/run/ambari-server</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>/var/run/ambari-server/bootstrap</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>/var/run/ambari-server/stack-recommendations</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>/var/log/ambari-server</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <directoryMode>777</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>/var/lib/ambari-server/data/tmp</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
     </fileSet>
     <fileSet>
-      <directory>src/main/conf</directory>
-      <outputDirectory>/ambari-server-${project.version}/etc/ambari-server/conf</outputDirectory>
+      <directoryMode>700</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>/var/lib/ambari-server/data/cache</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
     </fileSet>
     <fileSet>
-      <directory>${tarballResourcesFolder}</directory>
-      <outputDirectory>/ambari-server-${project.version}/var/lib/ambari-server/resources/</outputDirectory>
+      <directoryMode>700</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/upgrade</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <directory>src/main/resources/scripts</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/scripts</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>${ambari-admin-dir}/target</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/views</outputDirectory>
       <includes>
-        <include>common-services/**</include>
-        <include>stacks/stack_advisor.py</include>
-        <include>stacks/${stack.distribution}/**</include>
+        <include>*.jar</include>
       </includes>
     </fileSet>
+    <fileSet>
+      <directory>src/main/resources/upgrade/ddl</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/upgrade/ddl</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>src/main/resources/upgrade/dml</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/upgrade/dml</outputDirectory>
+    </fileSet>
+     <fileSet>
+      <directory>${commonServicesSrcLocation}</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/common-services</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>src/main/resources/upgrade/catalog</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/upgrade/catalog</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>${stacksSrcLocation}</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/stacks/${stack.distribution}</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>src/main/python/ambari_server</directory>
+      <outputDirectory>/usr/lib/python2.6/site-packages/ambari_server</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>src/main/resources/custom_action_definitions</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/custom_action_definitions</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>${customActionsRoot}</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/custom_actions</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>src/main/resources/host_scripts</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/host_scripts</outputDirectory>
+    </fileSet>
   </fileSets>
+  <!-- Single files. Syntax:
+	  <files>
+	    <file>
+	      <source/>
+	      <outputDirectory/>
+	      <destName/>
+	      <fileMode/>
+	      <lineEnding/>
+	      <filtered/>
+	    </file>
+	  </files>
+  -->
+  <files>
+    <file>
+      <source>${project.build.directory}/${project.artifactId}-${project.version}.jar</source>
+      <outputDirectory>/usr/lib/ambari-server</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>src/main/python/ambari-server.py</source>
+      <outputDirectory>/usr/sbin</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>src/main/python/ambari_server_main.py</source>
+      <outputDirectory>/usr/sbin</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>sbin/ambari-server</source>
+      <outputDirectory>/etc/init.d</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>../ambari-common/src/main/unix/ambari-python-wrap</source>
+      <outputDirectory>/var/lib/ambari-server/</outputDirectory>
+    </file>
+    <file>
+      <source>${ambariProperties}</source>
+      <outputDirectory>/etc/ambari-server/conf</outputDirectory>
+    </file>
+    <file>
+      <source>conf/unix/log4j.properties</source>
+      <outputDirectory>/etc/ambari-server/conf</outputDirectory>
+    </file>
+    <file>
+      <source>conf/unix/krb5JAASLogin.conf</source>
+      <outputDirectory>/etc/ambari-server/conf</outputDirectory>
+    </file>
+    <file>
+      <fileMode>700</fileMode>
+      <source>conf/unix/ambari-env.sh</source>
+      <outputDirectory>/var/lib/ambari-server/</outputDirectory>
+    </file>
+    <file>
+      <fileMode>700</fileMode>
+      <source>conf/unix/ambari-sudo.sh</source>
+      <outputDirectory>/var/lib/ambari-server/</outputDirectory>
+    </file>
+    <file>
+      <fileMode>700</fileMode>
+      <source>conf/unix/install-helper.sh</source>
+      <outputDirectory>/var/lib/ambari-server/</outputDirectory>
+    </file>
+    <file>
+      <source>conf/unix/ca.config</source>
+      <outputDirectory>/var/lib/ambari-server/keys</outputDirectory>
+    </file>
+    <file>
+      <source>target/classes/Ambari-DDL-Postgres-CREATE.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>src/main/resources/Ambari-DDL-Postgres-DROP.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>target/classes/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>src/main/resources/Ambari-DDL-Postgres-EMBEDDED-DROP.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>target/classes/Ambari-DDL-Oracle-CREATE.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>target/classes/Ambari-DDL-MySQL-CREATE.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>src/main/resources/Ambari-DDL-Oracle-DROP.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>src/main/resources/Ambari-DDL-MySQL-DROP.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>target/classes/Ambari-DDL-SQLServer-CREATE.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>target/classes/Ambari-DDL-SQLServer-CREATELOCAL.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>src/main/resources/Ambari-DDL-SQLServer-DROP.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>target/classes/Ambari-DDL-SQLAnywhere-CREATE.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>src/main/resources/Ambari-DDL-SQLAnywhere-DROP.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>${project.build.directory}/DBConnectionVerification.jar</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>src/main/resources/slider_resources/README.txt</source>
+      <outputDirectory>/var/lib/ambari-server/resources/apps</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>src/main/python/upgradeHelper.py</source>
+      <outputDirectory>/var/lib/ambari-server/resources/scripts</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>target/classes/stacks/stack_advisor.py</source>
+      <outputDirectory>/var/lib/ambari-server/resources/stacks</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>src/main/python/bootstrap.py</source>
+      <outputDirectory>/usr/lib/python2.6/site-packages/ambari_server</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>src/main/python/setupAgent.py</source>
+      <outputDirectory>/usr/lib/python2.6/site-packages/ambari_server</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>src/main/python/os_check_type.py</source>
+      <outputDirectory>/usr/lib/python2.6/site-packages/ambari_server</outputDirectory>
+    </file>
+    <file>
+      <source>${basedir}/target/version</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+  </files>    
   <dependencySets>
     <dependencySet>
-      <outputDirectory>ambari-server-${project.version}/lib/ambari-server</outputDirectory>
+      <outputDirectory>/usr/lib/ambari-server</outputDirectory>
       <unpack>false</unpack>
       <scope>compile</scope>
     </dependencySet>
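
The new assembly lays everything out directly under /etc, /usr and /var instead of an ambari-server-${project.version}/ staging directory, and the deb packaging now consumes the single -dist.tar.gz archive built from it. A quick way to sanity-check a local build is to list the top-level entries of that tarball; the snippet below is only a sketch (the tarball path depends on the version your build produced):

    import sys
    import tarfile

    # Usage: python check_dist.py ambari-server/target/ambari-server-<version>-dist.tar.gz
    with tarfile.open(sys.argv[1], "r:gz") as tar:
        top_level = sorted({m.name.lstrip("./").split("/")[0] for m in tar.getmembers()})
        # server.xml above maps content under /etc, /usr and /var,
        # so those are the roots expected here.
        print(top_level)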


[21/39] ambari git commit: AMBARI-14839 - DEA is not enabled due to configuration conflict and history log directory can not be changed (Jeff Zhang via jonathanhurley)

Posted by nc...@apache.org.
AMBARI-14839 - DEA is not enabled due to configuration conflict and history log directory can not be changed (Jeff Zhang via jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/981ede54
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/981ede54
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/981ede54

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 981ede5455c02ae1bd4aac7495c2c667bc889dbc
Parents: d634699
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Thu Jan 28 23:11:37 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Thu Jan 28 23:12:48 2016 -0500

----------------------------------------------------------------------
 .../SPARK/1.2.0.2.2/package/scripts/params.py   |  4 ++-
 .../SPARK/configuration/spark-defaults.xml      |  4 +--
 .../configuration/spark-thrift-sparkconf.xml    | 31 ++++++++++++++++----
 3 files changed, 31 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
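
In short: spark.history.fs.logDirectory and spark.eventLog.dir become real, editable values in spark-defaults (so the history log directory can be changed), params.py falls back to hdfs:///spark-history only when no value is configured, and the thrift server drops the fixed spark.executor.instances in favour of spark.dynamicAllocation.{min,initial,max}Executors so dynamic executor allocation no longer conflicts. A minimal standalone sketch of the params.py fallback (the config dict shape mirrors what the agent hands to params.py; the override value is illustrative):

    # Illustrative only -- mirrors the fallback added to params.py below.
    config = {
        'configurations': {
            'spark-defaults': {
                'spark.history.fs.logDirectory': 'hdfs:///custom/spark-history'
            }
        }
    }

    spark_history_dir = "hdfs:///spark-history"   # stack default
    spark_defaults = config['configurations'].get('spark-defaults', {})
    if 'spark.history.fs.logDirectory' in spark_defaults:
        spark_history_dir = spark_defaults['spark.history.fs.logDirectory']

    print(spark_history_dir)   # -> hdfs:///custom/spark-history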


http://git-wip-us.apache.org/repos/asf/ambari/blob/981ede54/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
index 935c47e..abf9191 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
@@ -86,7 +86,9 @@ hive_user = status_params.hive_user
 spark_group = status_params.spark_group
 user_group = status_params.user_group
 spark_hdfs_user_dir = format("/user/{spark_user}")
-spark_history_dir = 'hdfs:///spark-history'
+spark_history_dir = "hdfs:///spark-history"
+if 'spark-defaults' in config['configurations'] and 'spark.history.fs.logDirectory' in config['configurations']['spark-defaults']:
+  spark_history_dir = config['configurations']['spark-defaults']['spark.history.fs.logDirectory']
 
 spark_history_server_pid_file = status_params.spark_history_server_pid_file
 spark_thrift_server_pid_file = status_params.spark_thrift_server_pid_file

http://git-wip-us.apache.org/repos/asf/ambari/blob/981ede54/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
index d8af790..1a6552f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
@@ -31,7 +31,7 @@
     </property>
     <property>
         <name>spark.history.fs.logDirectory</name>
-        <value>{{spark_history_dir}}</value>
+        <value>hdfs:///spark-history</value>
         <description>
             Base directory for history spark application log.
         </description>
@@ -45,7 +45,7 @@
     </property>
     <property>
         <name>spark.eventLog.dir</name>
-        <value>{{spark_history_dir}}</value>
+        <value>hdfs:///spark-history</value>
         <description>
             Base directory in which Spark events are logged, if spark.eventLog.enabled is true.
         </description>

http://git-wip-us.apache.org/repos/asf/ambari/blob/981ede54/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
index 3b13496..2dbfe51 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
@@ -75,14 +75,17 @@
   <property>
     <name>spark.history.fs.logDirectory</name>
     <value>{{spark_history_dir}}</value>
+    <final>true</final>
     <description>
-      Base directory for history spark application log.
+      Base directory for history spark application log. It is the same value
+      as in spark-defaults.xml.
     </description>
   </property>
 
   <property>
     <name>spark.eventLog.enabled</name>
     <value>true</value>
+    <final>true</final>
     <description>
       Whether to log Spark events, useful for reconstructing the Web UI after the application has finished.
     </description>
@@ -91,8 +94,10 @@
   <property>
     <name>spark.eventLog.dir</name>
     <value>{{spark_history_dir}}</value>
+    <final>true</final>
     <description>
-      Base directory in which Spark events are logged, if spark.eventLog.enabled is true.
+      Base directory in which Spark events are logged, if spark.eventLog.enabled is true. It is the same value
+      as in spark-defaults.xml.
     </description>
   </property>
 
@@ -138,10 +143,26 @@
   </property>
 
   <property>
-    <name>spark.executor.instances</name>
-    <value>2</value>
+    <name>spark.dynamicAllocation.initialExecutors</name>
+    <value>0</value>
     <description>
-      The number of executor.
+      Initial number of executors to run if dynamic allocation is enabled.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.dynamicAllocation.maxExecutors</name>
+    <value>10</value>
+    <description>
+      Upper bound for the number of executors if dynamic allocation is enabled.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.dynamicAllocation.minExecutors</name>
+    <value>0</value>
+    <description>
+      Lower bound for the number of executors if dynamic allocation is enabled.
     </description>
   </property>
 


[28/39] ambari git commit: AMBARI-14843. Admin View: add page "Login Message" with full functionality (alexantonenko)

Posted by nc...@apache.org.
AMBARI-14843. Admin View: add page "Login Message" with full functionality (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5facf9a5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5facf9a5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5facf9a5

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 5facf9a5e7fecfbf433a8af274911b527f737fcb
Parents: 44c9866
Author: Alex Antonenko <hi...@gmail.com>
Authored: Fri Jan 29 18:19:36 2016 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Sat Jan 30 01:06:32 2016 +0200

----------------------------------------------------------------------
 .../main/resources/ui/admin-web/app/index.html  |  1 +
 .../loginActivities/LoginMessageMainCtrl.js     | 67 ++++++++++++++++++++
 .../ui/admin-web/app/scripts/i18n.config.js     |  5 +-
 .../resources/ui/admin-web/app/styles/main.css  |  6 ++
 .../app/views/loginActivities/loginMessage.html | 50 ++++++++++++++-
 5 files changed, 127 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
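
The new controller reads and writes the message through the admin-settings resource: GET /api/v1/admin-settings/motd to load it, then PUT to the same URI when the setting already exists or POST to /api/v1/admin-settings/ when it does not. Roughly, the write it issues looks like the sketch below (standard-library only; the host and credentials are hypothetical, and authentication handling is omitted):

    import json
    from urllib import request

    url = "http://ambari.example.com:8080/api/v1/admin-settings/motd"   # hypothetical host
    body = {
        "AdminSettings": {
            "content": json.dumps({"text": "Authorized users only", "status": "true"}),
            "name": "motd",
            "setting_type": "ambari-server"
        }
    }

    req = request.Request(url, data=json.dumps(body).encode("utf-8"), method="PUT")
    req.add_header("X-Requested-By", "ambari")   # Ambari requires this header on write requests
    # request.urlopen(req)   # plus basic-auth setup, left out of this sketch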


http://git-wip-us.apache.org/repos/asf/ambari/blob/5facf9a5/ambari-admin/src/main/resources/ui/admin-web/app/index.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/index.html b/ambari-admin/src/main/resources/ui/admin-web/app/index.html
index 268e92f..fd2c6b8 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/index.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/index.html
@@ -129,6 +129,7 @@
     <script src="scripts/controllers/NavbarCtrl.js"></script>
     <script src="scripts/controllers/authentication/AuthenticationMainCtrl.js"></script>
     <script src="scripts/controllers/loginActivities/LoginActivitiesMainCtrl.js"></script>
+    <script src="scripts/controllers/loginActivities/LoginMessageMainCtrl.js"></script>
     <script src="scripts/controllers/users/UsersCreateCtrl.js"></script>
     <script src="scripts/controllers/users/UsersListCtrl.js"></script>
     <script src="scripts/controllers/users/UsersShowCtrl.js"></script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5facf9a5/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/loginActivities/LoginMessageMainCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/loginActivities/LoginMessageMainCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/loginActivities/LoginMessageMainCtrl.js
new file mode 100644
index 0000000..11fcea5
--- /dev/null
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/loginActivities/LoginMessageMainCtrl.js
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+'use strict';
+
+angular.module('ambariAdminConsole')
+  .controller('LoginMessageMainCtrl',['$scope', 'Alert', '$timeout', '$http', '$translate', function($scope, Alert, $timeout, $http, $translate) {
+    var $t = $translate.instant;
+    $scope.status = false;
+    $scope.motdExists = false;
+    $scope.text = "";
+    $scope.submitDisabled = true;
+
+    $http.get('/api/v1/admin-settings/motd').then(function (res) {
+      var respons = JSON.parse(res.data.AdminSettings.content);
+      $scope.text = respons.text ? respons.text : "";
+      $scope.status = respons.status && respons.status == "true" ? true : false;
+      $scope.motdExists = true;
+    });
+
+    $scope.inputChangeEvent = function(){
+      $scope.submitDisabled = false;
+    };
+    $scope.changeStatus = function(){
+      $scope.status = !$scope.status;
+      $scope.submitDisabled = false;
+    };
+
+    $scope.saveLoginMsg = function(form) {
+      var method = $scope.motdExists ? 'PUT' : 'POST';
+      var data = {
+        'AdminSettings' : {
+          'content' : '{"text":"' + $scope.text + '", "status":"' + $scope.status + '"}',
+          'name' : 'motd',
+          'setting_type' : 'ambari-server'
+        }
+      };
+      form.submitted = true;
+      if (form.$valid){
+        $scope.submitDisabled = true;
+        $http({
+          method: method,
+          url: '/api/v1/admin-settings/' + ($scope.motdExists ? 'motd' : ''),
+          data: data
+        }).then(function successCallback() {
+          $scope.motdExists = true;
+        }, function errorCallback(data) {
+          $scope.submitDisabled = false;
+          Alert.error($t('common.loginActivities.saveError'), data.data.message);
+        });
+      }
+    };
+  }]);

http://git-wip-us.apache.org/repos/asf/ambari/blob/5facf9a5/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
index 04485c6..b274965 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
@@ -83,7 +83,10 @@ angular.module('ambariAdminConsole')
       'loginActivities': {
         'loginActivities':'Login Activities',
         'loginMessage': 'Login Message',
-        'homeDirectory': 'Home Directory'
+        'loginMessage.placeholder': 'Please enter login message',
+        'homeDirectory': 'Home Directory',
+        'onlySimpleChars': 'Must contain only simple characters.',
+        'saveError': 'Save error'
       },
 
       'controls': {

http://git-wip-us.apache.org/repos/asf/ambari/blob/5facf9a5/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
index 895a28f..f54d87c 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
@@ -682,6 +682,12 @@ table.no-border tr td{
   min-height: 63px;
 }
 
+.login-message-pane .active, .inactive {font-size:30px;cursor:pointer;float: left;margin-left: 17px;}
+.login-message-pane i.active {color: #5cb85c;margin-top: 3px;}
+.login-message-pane i.inactive {color: #d9534f;margin-top: 2px;}
+.login-message-pane .well {height: 74px;}
+.login-message-pane input {margin-left: 3px;}
+
 .views-permissions-panel .panel-body{
   padding-bottom: 0;
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5facf9a5/ambari-admin/src/main/resources/ui/admin-web/app/views/loginActivities/loginMessage.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/loginActivities/loginMessage.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/loginActivities/loginMessage.html
index dfdb3a7..8fb7870 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/loginActivities/loginMessage.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/loginActivities/loginMessage.html
@@ -16,4 +16,52 @@
 * limitations under the License.
 -->
 
-Login Message
\ No newline at end of file
+<br/>
+<div class="login-message-pane" ng-controller="LoginMessageMainCtrl">
+
+  <form class="form-horizontal" novalidate name="form" autocomplete="off">
+    <div class="well">
+      <div class="form-group" ng-class="{'has-error' : (form.login_text.$error.pattern) && form.submitted}">
+        <i class="fa fa-toggle-on active"
+           ng-if="status == true"
+           ng-click="changeStatus();">
+        </i>
+        <i class="fa fa-toggle-on fa-rotate-180 inactive"
+           ng-if="status == false"
+           ng-click="changeStatus();">
+        </i>
+        <div class="col-sm-11">
+          <input type="text"
+                 ng-disabled="!status"
+                 class="form-control"
+                 name="login_text"
+                 placeholder="{{'common.loginActivities.loginMessage.placeholder' | translate}}"
+                 ng-model="text"
+                 ng-change="inputChangeEvent()"
+                 ng-pattern="/^([a-zA-Z0-9._\s]+)$/"
+                 autocomplete="off">
+
+          <div class="alert alert-danger top-margin" ng-show="form.login_text.$error.pattern && form.submitted">
+            {{'common.loginActivities.onlySimpleChars' | translate}}
+          </div>
+        </div>
+      </div>
+    </div>
+    <div class="form-group">
+      <div class="col-sm-offset-2 col-sm-10">
+        <button
+          class="btn btn-primary groupcreate-btn pull-right left-margin"
+          ng-disabled="submitDisabled"
+          ng-click="saveLoginMsg(form)">
+          {{'common.controls.save' | translate}}
+        </button>
+      </div>
+    </div>
+  </form>
+</div>
+
+
+
+
+
+


[04/39] ambari git commit: AMBARI-14812 - Alerts API Does Not Honor JPA Sorting (jonathanhurley)

Posted by nc...@apache.org.
AMBARI-14812 - Alerts API Does Not Honor JPA Sorting (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/73b5399b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/73b5399b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/73b5399b

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 73b5399b12b4c17c119d37031d8e92ca398ca195
Parents: 29243e7
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Jan 27 10:00:12 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Wed Jan 27 15:47:17 2016 -0500

----------------------------------------------------------------------
 .../AlertDefinitionResourceProvider.java        |  4 ++-
 .../internal/AlertResourceProvider.java         |  6 ++--
 .../ambari/server/orm/dao/AlertsDAOTest.java    | 38 ++++++++++++++++++++
 3 files changed, 45 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
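
The alert entities come back from the DAO already sorted, but collecting the mapped resources into a HashSet discarded that order; LinkedHashSet keeps insertion order, so the requested sort reaches the API response intact. The same pitfall, restated in Python for brevity (values are arbitrary):

    rows = [42, 17, 5]                   # pretend the DAO returned these already sorted DESC
    unordered = set(rows)                # hash-based set: iteration order is arbitrary
    ordered = list(dict.fromkeys(rows))  # order-preserving de-duplication, like LinkedHashSet

    print(list(unordered))   # e.g. [17, 42, 5] -- the requested sort is lost
    print(ordered)           # [42, 17, 5]      -- the sort survives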


http://git-wip-us.apache.org/repos/asf/ambari/blob/73b5399b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
index 263a7b8..0f73ec6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
@@ -23,6 +23,7 @@ import java.util.Collection;
 import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -223,7 +224,8 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
 
     Set<String> requestPropertyIds = getRequestPropertyIds(request, predicate);
 
-    Set<Resource> results = new HashSet<Resource>();
+    // use a collection which preserves order since JPA sorts the results
+    Set<Resource> results = new LinkedHashSet<Resource>();
 
     for (Map<String, Object> propertyMap : getPropertyMaps(predicate)) {
       String clusterName = (String) propertyMap.get(ALERT_DEF_CLUSTER_NAME);

http://git-wip-us.apache.org/repos/asf/ambari/blob/73b5399b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertResourceProvider.java
index 0492860..dc071e9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertResourceProvider.java
@@ -21,6 +21,7 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -43,9 +44,9 @@ import org.apache.ambari.server.orm.dao.AlertsDAO;
 import org.apache.ambari.server.orm.entities.AlertCurrentEntity;
 import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
 import org.apache.ambari.server.orm.entities.AlertHistoryEntity;
+import org.apache.commons.lang.StringUtils;
 
 import com.google.inject.Inject;
-import org.apache.commons.lang.StringUtils;
 
 /**
  * ResourceProvider for Alert instances
@@ -150,7 +151,8 @@ public class AlertResourceProvider extends ReadOnlyResourceProvider implements
 
     Set<String> requestPropertyIds = getRequestPropertyIds(request, predicate);
 
-    Set<Resource> results = new HashSet<Resource>();
+    // use a collection which preserves order since JPA sorts the results
+    Set<Resource> results = new LinkedHashSet<Resource>();
 
     for (Map<String, Object> propertyMap : getPropertyMaps(predicate)) {
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/73b5399b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertsDAOTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertsDAOTest.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertsDAOTest.java
index 0bbe998..10f099e 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertsDAOTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertsDAOTest.java
@@ -412,6 +412,44 @@ public class AlertsDAOTest {
   }
 
   /**
+   * Tests that the Ambari sort is correctly applied to the JPA query.
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testAlertCurrentSorting() throws Exception {
+    AlertCurrentRequest request = new AlertCurrentRequest();
+
+    Predicate clusterPredicate = new PredicateBuilder().property(
+        AlertResourceProvider.ALERT_CLUSTER_NAME).equals(m_cluster.getClusterName()).toPredicate();
+
+    request.Predicate = clusterPredicate;
+
+    SortRequestProperty sortRequestProperty = new SortRequestProperty(AlertResourceProvider.ALERT_ID, Order.ASC);
+    request.Sort = new SortRequestImpl(Collections.singletonList(sortRequestProperty));
+
+    List<AlertCurrentEntity> currentAlerts = m_dao.findAll(request);
+    assertTrue(currentAlerts.size() >= 5);
+    long lastId = Long.MIN_VALUE;
+    for (AlertCurrentEntity alert : currentAlerts) {
+      assertTrue(lastId < alert.getAlertId());
+      lastId = alert.getAlertId();
+    }
+
+    // change the sort to DESC
+    sortRequestProperty = new SortRequestProperty(AlertResourceProvider.ALERT_ID, Order.DESC);
+    request.Sort = new SortRequestImpl(Collections.singletonList(sortRequestProperty));
+
+    currentAlerts = m_dao.findAll(request);
+    assertTrue(currentAlerts.size() >= 5);
+    lastId = Long.MAX_VALUE;
+    for (AlertCurrentEntity alert : currentAlerts) {
+      assertTrue(lastId > alert.getAlertId());
+      lastId = alert.getAlertId();
+    }
+  }
+
+  /**
    * Tests that the {@link AlertCurrentEntity} fields are updated properly when
    * a new {@link AlertHistoryEntity} is associated.
    *


[10/39] ambari git commit: AMBARI-14826. Alert Definitions Table doesn't clear Alert group filter (onechiporenko)

Posted by nc...@apache.org.
AMBARI-14826. Alert Definitions Table doesn't clear Alert group filter (onechiporenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7371c1b2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7371c1b2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7371c1b2

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 7371c1b24979c373a53c17cc5a6e226a179c8dba
Parents: 62ff3b9
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Thu Jan 28 13:23:16 2016 +0200
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Thu Jan 28 13:23:16 2016 +0200

----------------------------------------------------------------------
 ambari-web/app/views/common/table_view.js | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
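
Em.tryInvoke calls clearFilter only on child views that actually implement it, which is exactly what the removed hand-rolled guard did; the change also switches from the private _childViews to the public childViews collection. A rough Python analogue of the "call it only if it exists" idiom:

    class PlainView:                 # defines no clearFilter -> silently skipped
        pass

    class FilterView:
        def clearFilter(self):
            print("filter cleared")

    for child in [PlainView(), FilterView()]:
        clear = getattr(child, "clearFilter", None)   # rough analogue of Em.tryInvoke
        if callable(clear):
            clear()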


http://git-wip-us.apache.org/repos/asf/ambari/blob/7371c1b2/ambari-web/app/views/common/table_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/table_view.js b/ambari-web/app/views/common/table_view.js
index 422ab4f..2ac9649 100644
--- a/ambari-web/app/views/common/table_view.js
+++ b/ambari-web/app/views/common/table_view.js
@@ -487,10 +487,8 @@ App.TableView = Em.View.extend(App.UserPref, {
    */
   clearFilters: function() {
     this.set('filterConditions', []);
-    this.get('_childViews').forEach(function(childView) {
-      if (childView['clearFilter']) {
-        childView.clearFilter();
-      }
+    this.get('childViews').forEach(function(childView) {
+      Em.tryInvoke(childView, 'clearFilter');
     });
   }
 


[03/39] ambari git commit: AMBARI-14813. Unable to create config group, when adding override (akovalenko)

Posted by nc...@apache.org.
AMBARI-14813. Unable to create config group, when adding override (akovalenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/29243e7f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/29243e7f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/29243e7f

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 29243e7ff9ee401e0ad8c0c17772341f44aad5dd
Parents: ce72529
Author: Aleksandr Kovalenko <ak...@hortonworks.com>
Authored: Wed Jan 27 18:04:39 2016 +0200
Committer: Aleksandr Kovalenko <ak...@hortonworks.com>
Committed: Wed Jan 27 19:40:39 2016 +0200

----------------------------------------------------------------------
 ambari-web/app/mixins/main/service/configs/config_overridable.js | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/29243e7f/ambari-web/app/mixins/main/service/configs/config_overridable.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/main/service/configs/config_overridable.js b/ambari-web/app/mixins/main/service/configs/config_overridable.js
index cf99baa..674db2c 100644
--- a/ambari-web/app/mixins/main/service/configs/config_overridable.js
+++ b/ambari-web/app/mixins/main/service/configs/config_overridable.js
@@ -139,7 +139,8 @@ App.ConfigOverridable = Em.Mixin.create({
             service_id: serviceId,
             service_name: serviceId,
             hosts: [],
-            desired_configs: []
+            desired_configs: [],
+            properties: []
           };
           App.store.load(App.ServiceConfigGroup, newConfigGroup);
           App.store.commit();


[26/39] ambari git commit: AMBARI-14840: Expose default_segment_num property for HAWQ (adenissov via jaoki)

Posted by nc...@apache.org.
AMBARI-14840: Expose default_segment_num property for HAWQ (adenissov via jaoki)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/69c83419
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/69c83419
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/69c83419

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 69c83419517a5d87f9c460fc2b1bb8f41e50961a
Parents: ea60d29
Author: Jun Aoki <ja...@apache.org>
Authored: Fri Jan 29 13:31:46 2016 -0800
Committer: Jun Aoki <ja...@apache.org>
Committed: Fri Jan 29 13:31:46 2016 -0800

----------------------------------------------------------------------
 .../HAWQ/2.0.0/configuration/hawq-site.xml      | 14 ++++++++
 .../stacks/HDP/2.3/services/stack_advisor.py    |  9 +++++-
 .../common/services-normal-hawq-3-hosts.json    | 11 +++++++
 .../stacks/2.3/common/test_stack_advisor.py     | 34 ++++++++++++++++++++
 4 files changed, 67 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
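
The stack advisor now derives default_segment_num from the number of hosts running HAWQSEGMENT, multiplying by 6 for clusters below 50 segments and by 4 from 50 up, and leaves the shipped default of 24 alone when no segments are deployed. The same arithmetic as a standalone sketch, with the values the new unit test expects:

    def recommended_default_segment_num(num_segments):
        """Mirrors the recommendation added to the HDP 2.3 stack advisor."""
        if not num_segments:
            return None               # no segments deployed: keep the stack default (24)
        factor = 6 if num_segments < 50 else 4
        return num_segments * factor

    print(recommended_default_segment_num(3))    # 18
    print(recommended_default_segment_num(49))   # 294
    print(recommended_default_segment_num(50))   # 200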


http://git-wip-us.apache.org/repos/asf/ambari/blob/69c83419/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-site.xml b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-site.xml
index 90982e9..f034749 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-site.xml
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-site.xml
@@ -205,4 +205,18 @@
     </description>
   </property>
 
+  <property>
+    <name>default_segment_num</name>
+    <display-name>Default Number of Virtual Segments</display-name>
+    <value>24</value>
+    <description>
+      The default number of virtual segments to use when executing a query statement. When the query is actually executed,
+      the number of virtual segments may differ from this number depending on the query's needs.
+      When expanding the cluster, you should adjust this number to reflect the number of nodes in the new cluster times the number of virtual segments per node.
+    </description>
+    <value-attributes>
+      <type>int</type>
+    </value-attributes>
+  </property>
+
 </configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/69c83419/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
index 0544f5a..3e60f05 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
@@ -671,7 +671,14 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
     if self.isHawqMasterComponentOnAmbariServer(services):
       if "hawq-site" in services["configurations"] and "hawq_master_address_port" in services["configurations"]["hawq-site"]["properties"]:
         putHawqSiteProperty('hawq_master_address_port', '')
-          
+    # calculate optimal number of virtual segments
+    componentsListList = [service["components"] for service in services["services"]]
+    componentsList = [item["StackServiceComponents"] for sublist in componentsListList for item in sublist]
+    numSegments = len(self.__getHosts(componentsList, "HAWQSEGMENT"))
+    # update default if segments are deployed
+    if numSegments and "hawq-site" in services["configurations"] and "default_segment_num" in services["configurations"]["hawq-site"]["properties"]:
+      factor = 6 if numSegments < 50 else 4
+      putHawqSiteProperty('default_segment_num', numSegments * factor)
           
   def getServiceConfigurationValidators(self):
     parentValidators = super(HDP23StackAdvisor, self).getServiceConfigurationValidators()

http://git-wip-us.apache.org/repos/asf/ambari/blob/69c83419/ambari-server/src/test/python/stacks/2.3/common/services-normal-hawq-3-hosts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/common/services-normal-hawq-3-hosts.json b/ambari-server/src/test/python/stacks/2.3/common/services-normal-hawq-3-hosts.json
index a16ab32..d10a976 100644
--- a/ambari-server/src/test/python/stacks/2.3/common/services-normal-hawq-3-hosts.json
+++ b/ambari-server/src/test/python/stacks/2.3/common/services-normal-hawq-3-hosts.json
@@ -358,6 +358,17 @@
         "type" : "hawq-site.xml"
       },
       "dependencies" : [ ]
+    }, {
+      "href" : "/api/v1/stacks/HDP/versions/2.3/services/HAWQ/configurations/default_segment_num",
+      "StackConfigurations" : {
+        "property_depends_on" : [ ],
+        "property_name" : "default_segment_num",
+        "service_name" : "HAWQ",
+        "stack_name" : "HDP",
+        "stack_version" : "2.3",
+        "type" : "hawq-site.xml"
+      },
+      "dependencies" : [ ]
     } ]
   }, {
     "href" : "/api/v1/stacks/HDP/versions/2.3/services/HDFS",

http://git-wip-us.apache.org/repos/asf/ambari/blob/69c83419/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
index 362a7cd..e345e2a 100644
--- a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
@@ -1316,6 +1316,40 @@ class TestHDP23StackAdvisor(TestCase):
     self.stackAdvisor.recommendTezConfigurations(configurations, clusterData, services, hosts)
     self.assertEquals(configurations, expected)
 
+  def test_recommendHAWQConfigurations(self):
+
+    # original cluster data with 3 segments
+    services = self.load_json("services-normal-hawq-3-hosts.json")
+    componentsListList = [service["components"] for service in services["services"]]
+    componentsList = [item for sublist in componentsListList for item in sublist]
+    hawqSegmentComponent = [component["StackServiceComponents"] for component in componentsList if component["StackServiceComponents"]["component_name"] == "HAWQSEGMENT"][0]
+    services["configurations"]["hawq-site"] = {"properties": {"default_segment_num": "24"}}
+
+    configurations = {}
+    clusterData = {}
+
+    # Test 1 - with 3 segments
+    self.assertEquals(len(hawqSegmentComponent["hostnames"]), 3)
+    self.stackAdvisor.recommendHAWQConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations["hawq-site"]["properties"]["default_segment_num"], str(3 * 6))
+
+    # Test 2 - with 49 segments
+    hawqSegmentComponent["hostnames"] = ["host" + str(i) for i in range(49)]
+    self.stackAdvisor.recommendHAWQConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations["hawq-site"]["properties"]["default_segment_num"], str(49 * 6))
+
+    # Test 3 - with 50 segments (threshold for new factor)
+    hawqSegmentComponent["hostnames"] = ["host" + str(i) for i in range(50)]
+    self.stackAdvisor.recommendHAWQConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations["hawq-site"]["properties"]["default_segment_num"], str(50 * 4))
+
+    # Test 4 - with no segments
+    configurations = {}
+    hawqSegmentComponent["hostnames"] = []
+    self.stackAdvisor.recommendHAWQConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations, {'hawq-site': {'properties': {}}})
+
+
   def test_validateHiveConfigurations(self):
     properties = {"hive_security_authorization": "None",
                   "hive.exec.orc.default.stripe.size": "8388608",


[23/39] ambari git commit: AMBARI-14845. Kerberized cluster with RM HA contains incorrect proxyuser yarn properties. (vbrodetskyi)


Posted by nc...@apache.org.
AMBARI-14845. Kerberized cluster with RM HA contains incorrect proxyuser yarn properties. (vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ea60d298
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ea60d298
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ea60d298

Branch: refs/heads/branch-dev-patch-upgrade
Commit: ea60d2980beb0bfe78fb312c583b3af928661d6e
Parents: 6bac8ec
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Fri Jan 29 16:29:16 2016 +0200
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Fri Jan 29 16:29:16 2016 +0200

----------------------------------------------------------------------
 .../main/resources/common-services/YARN/2.1.0.2.0/kerberos.json  | 4 ++--
 .../main/resources/stacks/HDP/2.2/services/YARN/kerberos.json    | 4 ++--
 .../main/resources/stacks/HDP/2.3/services/YARN/kerberos.json    | 4 ++--
 3 files changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ea60d298/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/kerberos.json b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/kerberos.json
index ff2f484..9afe668 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/kerberos.json
@@ -36,8 +36,8 @@
         },
         {
           "core-site": {
-            "hadoop.proxyuser.yarn.groups": "*",
-            "hadoop.proxyuser.yarn.hosts": "${yarn-site/yarn.resourcemanager.hostname}"
+            "hadoop.proxyuser.${yarn-env/yarn_user}.groups": "*",
+            "hadoop.proxyuser.${yarn-env/yarn_user}.hosts": "${clusterHostInfo/rm_host}"
           }
         }
       ],

http://git-wip-us.apache.org/repos/asf/ambari/blob/ea60d298/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/kerberos.json
index 2f74aba..240f61e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/kerberos.json
@@ -37,8 +37,8 @@
         },
         {
           "core-site": {
-            "hadoop.proxyuser.yarn.groups": "*",
-            "hadoop.proxyuser.yarn.hosts": "${yarn-site/yarn.resourcemanager.hostname}"
+            "hadoop.proxyuser.${yarn-env/yarn_user}.groups": "*",
+            "hadoop.proxyuser.${yarn-env/yarn_user}.hosts": "${clusterHostInfo/rm_host}"
           }
         }
       ],

http://git-wip-us.apache.org/repos/asf/ambari/blob/ea60d298/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/kerberos.json
index 70d12b4..22fed42 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/kerberos.json
@@ -37,8 +37,8 @@
         },
         {
           "core-site": {
-            "hadoop.proxyuser.yarn.groups": "*",
-            "hadoop.proxyuser.yarn.hosts": "${yarn-site/yarn.resourcemanager.hostname}"
+            "hadoop.proxyuser.${yarn-env/yarn_user}.groups": "*",
+            "hadoop.proxyuser.${yarn-env/yarn_user}.hosts": "${clusterHostInfo/rm_host}"
           }
         },
         {


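The practical effect of the three identical edits above: the proxyuser property names now track the configured YARN service user instead of a hard-coded "yarn", and the hosts value is taken from clusterHostInfo/rm_host rather than the single yarn.resourcemanager.hostname, which is what an RM HA cluster needs. A rough illustration of the resolved core-site values, assuming the default "yarn" user and two hypothetical ResourceManagers (host names are invented for the example):

# Illustration only: what the substituted core-site entries might look like
# for a cluster whose yarn-env/yarn_user is "yarn" and whose
# clusterHostInfo/rm_host lists two ResourceManagers.
resolved_core_site = {
    "hadoop.proxyuser.yarn.groups": "*",
    "hadoop.proxyuser.yarn.hosts": "rm1.example.com,rm2.example.com",
}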
[18/39] ambari git commit: AMBARI-14822 Combo Search: Create auto suggest for Host attribute filters (Joe Wang via rzang)

Posted by nc...@apache.org.
AMBARI-14822 Combo Search: Create auto suggest for Host attribute filters (Joe Wang via rzang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b037ef5c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b037ef5c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b037ef5c

Branch: refs/heads/branch-dev-patch-upgrade
Commit: b037ef5cfc9cfd72126818cf8c5c5e7c3525dc52
Parents: cad69e6
Author: Richard Zang <rz...@apache.org>
Authored: Fri Jan 29 08:58:08 2016 +0800
Committer: Richard Zang <rz...@apache.org>
Committed: Fri Jan 29 08:58:08 2016 +0800

----------------------------------------------------------------------
 .../controllers/main/host/combo_search_box.js   | 30 +++++++++++---------
 ambari-web/app/utils/ajax/ajax.js               | 15 ++++++++++
 .../app/views/main/host/combo_search_box.js     |  1 +
 ambari-web/vendor/scripts/visualsearch.js       |  2 +-
 4 files changed, 34 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b037ef5c/ambari-web/app/controllers/main/host/combo_search_box.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/host/combo_search_box.js b/ambari-web/app/controllers/main/host/combo_search_box.js
index 3832611..1c2a87f 100644
--- a/ambari-web/app/controllers/main/host/combo_search_box.js
+++ b/ambari-web/app/controllers/main/host/combo_search_box.js
@@ -21,6 +21,7 @@ var App = require('app');
 App.MainHostComboSearchBoxController = Em.Controller.extend({
   name: 'mainHostComboSearchBoxController',
   currentSuggestion: [],
+  page_size: 10,
 
   VSCallbacks : {
     search: function (query, searchCollection) {
@@ -42,12 +43,12 @@ App.MainHostComboSearchBoxController = Em.Controller.extend({
     },
 
     facetMatches: function (callback) {
-      console.log('called');
       callback([
-        {label: 'name', category: 'Host'},
+        {label: 'host_name', category: 'Host'},
         {label: 'ip', category: 'Host'},
         {label: 'version', category: 'Host'},
         {label: 'health', category: 'Host'},
+        {label: 'rack', category: 'Host'},
         {label: 'service', category: 'Service'},
         {label: 'component', category: 'Service'},
         {label: 'state', category: 'Service'}
@@ -57,13 +58,11 @@ App.MainHostComboSearchBoxController = Em.Controller.extend({
     valueMatches: function (facet, searchTerm, callback) {
       var controller = App.router.get('mainHostComboSearchBoxController');
       switch (facet) {
-        case 'name':
-          controller.getHostPropertySuggestions('name', searchTerm).done(function() {
-            callback(controller.get('currentSuggestion'));
-          });
-          break;
+        case 'host_name':
         case 'ip':
-          callback(App.Host.find().toArray().mapProperty('ip'));
+          controller.getPropertySuggestions(facet, searchTerm).done(function() {
+            callback(controller.get('currentSuggestion'), {preserveMatches: true});
+          });
           break;
         case 'rack':
           callback(App.Host.find().toArray().mapProperty('rack').uniq());
@@ -104,18 +103,23 @@ App.MainHostComboSearchBoxController = Em.Controller.extend({
     }
   },
 
-  getHostPropertySuggestions: function(facet, searchTerm) {
+  getPropertySuggestions: function(facet, searchTerm) {
     return App.ajax.send({
-      name: 'hosts.all.install',
+      name: 'hosts.with_searchTerm',
       sender: this,
-      success: 'updateHostNameSuggestion',
+      data: {
+        facet: facet,
+        searchTerm: searchTerm,
+        page_size: this.get('page_size')
+      },
+      success: 'getPropertySuggestionsSuccess',
       error: 'commonSuggestionErrorCallback'
     });
   },
 
-  updateHostNameSuggestion: function(data) {
+  getPropertySuggestionsSuccess: function(data, opt, params) {
     this.updateSuggestion(data.items.map(function(item) {
-      return item.Hosts.host_name;
+      return item.Hosts[params.facet];
     }));
   },
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b037ef5c/ambari-web/app/utils/ajax/ajax.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/ajax/ajax.js b/ambari-web/app/utils/ajax/ajax.js
index df3782f..4d8d195 100644
--- a/ambari-web/app/utils/ajax/ajax.js
+++ b/ambari-web/app/utils/ajax/ajax.js
@@ -2358,6 +2358,21 @@ var urls = {
     'real': '/clusters/{clusterName}/hosts?fields=Hosts/cpu_count,Hosts/disk_info,Hosts/total_mem,Hosts/os_type,Hosts/os_arch,Hosts/ip,host_components/HostRoles/state&minimal_response=true',
     'mock': '/data/hosts/HDP2/hosts.json'
   },
+  'hosts.with_searchTerm': {
+    'real': '/clusters/{clusterName}/hosts?fields=Hosts/{facet}&minimal_response=true&page_size={page_size}',
+    'mock': '',
+    format: function (data) {
+      return {
+        headers: {
+          'X-Http-Method-Override': 'GET'
+        },
+        type: 'POST',
+        data: JSON.stringify({
+          "RequestInfo": {"query": (data.searchTerm ? "Hosts/"+ data.facet +".matches(.*" + data.searchTerm + ".*)" : "")}
+        })
+      };
+    }
+  },
   'host_components.all': {
     'real': '/clusters/{clusterName}/host_components?fields=HostRoles/host_name&minimal_response=true',
     'mock': ''

http://git-wip-us.apache.org/repos/asf/ambari/blob/b037ef5c/ambari-web/app/views/main/host/combo_search_box.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/host/combo_search_box.js b/ambari-web/app/views/main/host/combo_search_box.js
index 36af44d..0ab6029 100644
--- a/ambari-web/app/views/main/host/combo_search_box.js
+++ b/ambari-web/app/views/main/host/combo_search_box.js
@@ -29,6 +29,7 @@ App.MainHostComboSearchBoxView = Em.View.extend({
       container: $('#combo_search_box'),
       query: '',
       showFacets: true,
+      delay: 1000,
       unquotable: [
         'text'
       ],

http://git-wip-us.apache.org/repos/asf/ambari/blob/b037ef5c/ambari-web/vendor/scripts/visualsearch.js
----------------------------------------------------------------------
diff --git a/ambari-web/vendor/scripts/visualsearch.js b/ambari-web/vendor/scripts/visualsearch.js
index 044977c..ef07680 100644
--- a/ambari-web/vendor/scripts/visualsearch.js
+++ b/ambari-web/vendor/scripts/visualsearch.js
@@ -612,7 +612,7 @@
             this.box.autocomplete({
                 source    : _.bind(this.autocompleteValues, this),
                 minLength : 0,
-                delay     : 0,
+                delay     : this.app.options.delay,
                 autoFocus : true,
                 position  : {offset : "0 5"},
                 create    : _.bind(function(e, ui) {


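The new hosts.with_searchTerm entry above turns a value-suggestion request into a POST whose X-Http-Method-Override header makes the server treat it as a GET, with the regex predicate carried in RequestInfo. A small Python sketch of the request that format() assembles (cluster name and search term are examples; everything else mirrors the diff):

# Mirrors the format() function added to ajax.js; values are examples.
def build_host_suggestion_request(cluster, facet, search_term, page_size=10):
    url = ("/clusters/%s/hosts?fields=Hosts/%s"
           "&minimal_response=true&page_size=%d" % (cluster, facet, page_size))
    query = "Hosts/%s.matches(.*%s.*)" % (facet, search_term) if search_term else ""
    headers = {"X-Http-Method-Override": "GET"}
    body = {"RequestInfo": {"query": query}}
    return url, headers, body            # sent as POST, interpreted as GET

url, headers, body = build_host_suggestion_request("c1", "host_name", "node")
# body["RequestInfo"]["query"] == "Hosts/host_name.matches(.*node.*)"

The one-second autocomplete delay added to the view keeps this request from firing on every keystroke.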
[17/39] ambari git commit: Revert "AMBARI-14822 Combo Search: Create auto suggest for Host attribute filters (Joe Wang via rzang)"

Posted by nc...@apache.org.
Revert "AMBARI-14822 Combo Search: Create auto suggest for Host attribute filters (Joe Wang via rzang)"

This reverts commit 5e09053deb0700b09ced4f8f12902ff2ea60154a.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/cad69e61
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/cad69e61
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/cad69e61

Branch: refs/heads/branch-dev-patch-upgrade
Commit: cad69e6132b554386e82d45950fc1b302f689af7
Parents: 5e09053
Author: Richard Zang <rz...@apache.org>
Authored: Fri Jan 29 08:51:50 2016 +0800
Committer: Richard Zang <rz...@apache.org>
Committed: Fri Jan 29 08:51:50 2016 +0800

----------------------------------------------------------------------
 .../main/admin/stack_and_upgrade_controller.js  | 22 ++------------------
 ambari-web/app/messages.js                      |  2 --
 .../stack_upgrade/stack_upgrade_wizard.hbs      |  8 +++----
 ambari-web/app/utils/ajax/ajax.js               |  3 ---
 4 files changed, 6 insertions(+), 29 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/cad69e61/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
index 16a58d6..f80c5ea 100644
--- a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
+++ b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
@@ -553,15 +553,13 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
    * abort upgrade (in order to start Downgrade)
    */
   abortUpgrade: function () {
-    var errorCallback = this.get('isDowngrade') ? 'abortDowngradeErrorCallback' : 'abortUpgradeErrorCallback';
     return App.ajax.send({
       name: 'admin.upgrade.abort',
       sender: this,
       data: {
-        upgradeId: this.get('upgradeId'),
-        isDowngrade: this.get('isDowngrade')
+        upgradeId: this.get('upgradeId')
       },
-      error: errorCallback
+      error: 'abortUpgradeErrorCallback'
     });
   },
 
@@ -581,22 +579,6 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
     App.showAlertPopup(header, body);
   },
 
-  /**
-   * error callback of <code>abortDowngrade()</code>
-   * @param {object} data
-   */
-  abortDowngradeErrorCallback: function (data) {
-    var header = Em.I18n.t('admin.stackDowngrade.state.paused.fail.header');
-    var body = Em.I18n.t('admin.stackDowngrade.state.paused.fail.body');
-    if(data && data.responseText){
-      try {
-        var json = $.parseJSON(data.responseText);
-        body = body + ' ' + json.message;
-      } catch (err) {}
-    }
-    App.showAlertPopup(header, body);
-  },
-
   retryUpgrade: function () {
     this.setProperties({
       requestInProgress: true,

http://git-wip-us.apache.org/repos/asf/ambari/blob/cad69e61/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 916ca2a..d6ecbf3 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -1497,8 +1497,6 @@ Em.I18n.translations = {
   'admin.stackVersions.version.downgrade.suspended': "Downgrade: Paused",
   'admin.stackUpgrade.state.paused.fail.header': "Pause Upgrade failed",
   'admin.stackUpgrade.state.paused.fail.body': "Upgrade could not be paused. Try again later.",
-  'admin.stackDowngrade.state.paused.fail.header': "Pause Downgrade failed",
-  'admin.stackDowngrade.state.paused.fail.body': "Downgrade could not be paused. Try again later.",
 
   'admin.stackVersions.version.upgrade.upgradeOptions.header': "Upgrade Options",
   'admin.stackVersions.version.upgrade.upgradeOptions.bodyMsg.version': "You are about to perform an upgrade to <b>{0}</b>.",

http://git-wip-us.apache.org/repos/asf/ambari/blob/cad69e61/ambari-web/app/templates/main/admin/stack_upgrade/stack_upgrade_wizard.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/stack_upgrade/stack_upgrade_wizard.hbs b/ambari-web/app/templates/main/admin/stack_upgrade/stack_upgrade_wizard.hbs
index 02022cf..a13e2c2 100644
--- a/ambari-web/app/templates/main/admin/stack_upgrade/stack_upgrade_wizard.hbs
+++ b/ambari-web/app/templates/main/admin/stack_upgrade/stack_upgrade_wizard.hbs
@@ -81,7 +81,7 @@
                     <button class="btn btn-danger" {{bindAttr disabled="controller.requestInProgress"}} {{action confirmDowngrade view.failedItem target="controller"}}>{{t common.downgrade}}</button>
                   {{/if}}
                   {{#if isDowngrade}}
-                    <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
+                    <button class="btn" {{action closeWizard target="view.parentView"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
                   {{else}}
                     <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseUpgrade}}</button>
                   {{/if}}
@@ -108,7 +108,7 @@
                 <button class="btn btn-danger" {{bindAttr disabled="controller.requestInProgress"}} {{action confirmDowngrade view.manualItem target="controller"}}>{{t common.downgrade}}</button>
               {{/if}}
               {{#if isDowngrade}}
-                <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
+                <button class="btn" {{action closeWizard target="view.parentView"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
               {{else}}
                 <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseUpgrade}}</button>
               {{/if}}
@@ -189,7 +189,7 @@
                 <button class="btn btn-danger" {{bindAttr disabled="controller.requestInProgress"}} {{action confirmDowngrade view.manualItem target="controller"}}>{{t common.downgrade}}</button>
               {{/if}}
               {{#if isDowngrade}}
-                <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
+                <button class="btn" {{action closeWizard target="view.parentView"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
               {{else}}
                 <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseUpgrade}}</button>
               {{/if}}
@@ -252,7 +252,7 @@
                   <button class="btn btn-danger" {{bindAttr disabled="controller.requestInProgress"}} {{action confirmDowngrade view.manualItem target="controller"}}>{{t common.downgrade}}</button>
               {{/if}}
               {{#if isDowngrade}}
-                  <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
+                  <button class="btn" {{action closeWizard target="view.parentView"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
               {{else}}
                   <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseUpgrade}}</button>
               {{/if}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/cad69e61/ambari-web/app/utils/ajax/ajax.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/ajax/ajax.js b/ambari-web/app/utils/ajax/ajax.js
index a301148..df3782f 100644
--- a/ambari-web/app/utils/ajax/ajax.js
+++ b/ambari-web/app/utils/ajax/ajax.js
@@ -1666,9 +1666,6 @@ var urls = {
     'format': function (data) {
       return {
         data: JSON.stringify({
-          "RequestInfo": {
-            "downgrade": data.isDowngrade
-          },
           "Upgrade": {
             "request_status": "ABORTED"
           }


[09/39] ambari git commit: AMBARI-14824 Unexpected icon for service check during rolling upgrade while "Skip all Service Check failures" is enabled. (atkach)

Posted by nc...@apache.org.
AMBARI-14824 Unexpected icon for service check during rolling upgrade while "Skip all Service Check failures" is enabled. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/62ff3b96
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/62ff3b96
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/62ff3b96

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 62ff3b963052cdf73b2913ea3b162e0586016bc8
Parents: d38a953
Author: Andrii Tkach <at...@hortonworks.com>
Authored: Thu Jan 28 10:54:08 2016 +0200
Committer: Andrii Tkach <at...@hortonworks.com>
Committed: Thu Jan 28 10:54:08 2016 +0200

----------------------------------------------------------------------
 ambari-web/app/utils/ajax/ajax.js | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/62ff3b96/ambari-web/app/utils/ajax/ajax.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/ajax/ajax.js b/ambari-web/app/utils/ajax/ajax.js
index c4a1611..df3782f 100644
--- a/ambari-web/app/utils/ajax/ajax.js
+++ b/ambari-web/app/utils/ajax/ajax.js
@@ -1578,6 +1578,7 @@ var urls = {
     'Upgrade/progress_percent,Upgrade/request_context,Upgrade/request_status,Upgrade/direction,Upgrade/downgrade_allowed,' +
     'upgrade_groups/UpgradeGroup,' +
     'upgrade_groups/upgrade_items/UpgradeItem/status,' +
+    'upgrade_groups/upgrade_items/UpgradeItem/display_status,' +
     'upgrade_groups/upgrade_items/UpgradeItem/context,' +
     'upgrade_groups/upgrade_items/UpgradeItem/group_id,' +
     'upgrade_groups/upgrade_items/UpgradeItem/progress_percent,' +


[20/39] ambari git commit: AMBARI-14819 - RU: Storm Topologies stopped running during rolling upgrade (jonathanhurley)

Posted by nc...@apache.org.
AMBARI-14819 - RU: Storm Topologies stopped running during rolling upgrade (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a2f436a6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a2f436a6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a2f436a6

Branch: refs/heads/branch-dev-patch-upgrade
Commit: a2f436a610b130799c49fdafa30b11f6de1c8157
Parents: 981ede5
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Jan 27 16:38:32 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Thu Jan 28 23:12:48 2016 -0500

----------------------------------------------------------------------
 .../stacks/HDP/2.3/upgrades/upgrade-2.4.xml     | 76 --------------------
 1 file changed, 76 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a2f436a6/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
index 33a9b16..660dbe2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
@@ -747,105 +747,29 @@
 
     <service name="STORM">
       <component name="NIMBUS">
-        <pre-downgrade>
-          <task xsi:type="execute" summary="Removing Storm data from ZooKeeper">
-            <script>scripts/storm_upgrade.py</script>
-            <function>delete_storm_zookeeper_data</function>
-          </task>
-
-          <task xsi:type="execute" summary="Removing local Storm data">
-            <script>scripts/storm_upgrade.py</script>
-            <function>delete_storm_local_data</function>
-          </task>
-        </pre-downgrade>
-      
-        <pre-upgrade>
-          <task xsi:type="manual">
-            <message>Before continuing, please deactivate and kill any currently running topologies.</message>
-          </task>
-
-          <task xsi:type="execute" summary="Removing Storm data from ZooKeeper">
-            <script>scripts/storm_upgrade.py</script>
-            <function>delete_storm_zookeeper_data</function>
-          </task>
-
-          <task xsi:type="execute" summary="Removing local Storm data">
-            <script>scripts/storm_upgrade.py</script>
-            <function>delete_storm_local_data</function>
-          </task>
-
-        </pre-upgrade>
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
       </component>
-
       <component name="STORM_REST_API">
-        <pre-upgrade>
-          <task xsi:type="execute" summary="Removing local Storm data">
-            <script>scripts/storm_upgrade.py</script>
-            <function>delete_storm_local_data</function>
-          </task>
-        </pre-upgrade>
-
-        <pre-downgrade>
-          <task xsi:type="manual">
-            <message>Before continuing, please deactivate and kill any currently running topologies.</message>
-          </task>
-
-          <task xsi:type="execute" summary="Removing local Storm data">
-            <script>scripts/storm_upgrade.py</script>
-            <function>delete_storm_local_data</function>
-          </task>
-        </pre-downgrade>
-
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
       </component>
-
       <component name="SUPERVISOR">
-        <pre-upgrade>
-          <task xsi:type="execute" summary="Removing local Storm data">
-            <script>scripts/storm_upgrade.py</script>
-            <function>delete_storm_local_data</function>
-          </task>
-        </pre-upgrade>
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
       </component>
-
       <component name="STORM_UI_SERVER">
-        <pre-upgrade>
-          <task xsi:type="execute" summary="Removing local Storm data">
-            <script>scripts/storm_upgrade.py</script>
-            <function>delete_storm_local_data</function>
-          </task>
-        </pre-upgrade>
-
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
       </component>
-
       <component name="DRPC_SERVER">
-        <pre-upgrade>
-          <task xsi:type="execute" summary="Removing local Storm data">
-            <script>scripts/storm_upgrade.py</script>
-            <function>delete_storm_local_data</function>
-          </task>
-        </pre-upgrade>
-
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
-
-        <post-upgrade>
-          <task xsi:type="manual">
-            <message>Please rebuild your topology using the new Storm version dependencies and resubmit it using the newly created jar.</message>
-          </task>
-        </post-upgrade>
       </component>
     </service>
 


[05/39] ambari git commit: AMBARI-14806. Provide Metrics discovery API for AMS. (swagle)

Posted by nc...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/646fb429/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java
new file mode 100644
index 0000000..bf887be
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java
@@ -0,0 +1,112 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery;
+
+import junit.framework.Assert;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.AbstractMiniHBaseClusterTest;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+
+public class TestMetadataManager extends AbstractMiniHBaseClusterTest {
+  TimelineMetricMetadataManager metadataManager;
+
+  @Before
+  public void insertDummyRecords() throws IOException, SQLException {
+    // Initialize new manager
+    metadataManager = new TimelineMetricMetadataManager(hdb, new Configuration());
+    final long now = System.currentTimeMillis();
+
+    TimelineMetrics timelineMetrics = new TimelineMetrics();
+    TimelineMetric metric1 = new TimelineMetric();
+    metric1.setMetricName("dummy_metric1");
+    metric1.setHostName("dummy_host1");
+    metric1.setTimestamp(now);
+    metric1.setStartTime(now - 1000);
+    metric1.setAppId("dummy_app1");
+    metric1.setType("Integer");
+    metric1.setMetricValues(new TreeMap<Long, Double>() {{
+      put(now - 100, 1.0);
+      put(now - 200, 2.0);
+      put(now - 300, 3.0);
+    }});
+    timelineMetrics.getMetrics().add(metric1);
+    TimelineMetric metric2 = new TimelineMetric();
+    metric2.setMetricName("dummy_metric2");
+    metric2.setHostName("dummy_host2");
+    metric2.setTimestamp(now);
+    metric2.setStartTime(now - 1000);
+    metric2.setAppId("dummy_app2");
+    metric2.setType("Integer");
+    metric2.setMetricValues(new TreeMap<Long, Double>() {{
+      put(now - 100, 1.0);
+      put(now - 200, 2.0);
+      put(now - 300, 3.0);
+    }});
+    timelineMetrics.getMetrics().add(metric2);
+
+    hdb.insertMetricRecordsWithMetadata(metadataManager, timelineMetrics);
+  }
+
+  @Test
+  public void testSaveMetricsMetadata() throws Exception {
+    Map<TimelineMetricMetadataKey, TimelineMetricMetadata> cachedData = metadataManager.getMetadataCache();
+
+    Assert.assertNotNull(cachedData);
+    Assert.assertEquals(2, cachedData.size());
+    TimelineMetricMetadataKey key1 = new TimelineMetricMetadataKey("dummy_metric1", "dummy_app1");
+    TimelineMetricMetadataKey key2 = new TimelineMetricMetadataKey("dummy_metric2", "dummy_app2");
+    TimelineMetricMetadata value1 = new TimelineMetricMetadata("dummy_metric1",
+      "dummy_app1", "Integer", null, 1L, true);
+    TimelineMetricMetadata value2 = new TimelineMetricMetadata("dummy_metric2",
+      "dummy_app2", "Integer", null, 1L, true);
+
+    Assert.assertEquals(value1, cachedData.get(key1));
+    Assert.assertEquals(value2, cachedData.get(key2));
+
+    TimelineMetricMetadataSync syncRunnable = new TimelineMetricMetadataSync(metadataManager);
+    syncRunnable.run();
+
+    Map<TimelineMetricMetadataKey, TimelineMetricMetadata> savedData =
+      hdb.getTimelineMetricMetadata();
+
+    Assert.assertNotNull(savedData);
+    Assert.assertEquals(2, savedData.size());
+    Assert.assertEquals(value1, savedData.get(key1));
+    Assert.assertEquals(value2, savedData.get(key2));
+
+    Map<String, Set<String>> cachedHostData = metadataManager.getHostedAppsCache();
+    Map<String, Set<String>> savedHostData = metadataManager.getPersistedHostedAppsData();
+    Assert.assertEquals(cachedData.size(), savedData.size());
+    Assert.assertEquals("dummy_app1", cachedHostData.get("dummy_host1").iterator().next());
+    Assert.assertEquals("dummy_app2", cachedHostData.get("dummy_host2").iterator().next());
+    Assert.assertEquals("dummy_app1", savedHostData.get("dummy_host1").iterator().next());
+    Assert.assertEquals("dummy_app2", savedHostData.get("dummy_host2").iterator().next());
+  }
+
+
+}


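The assertions in testSaveMetricsMetadata outline the shape of the discovery data the new manager maintains: metadata keyed by (metric name, app id), plus a hosted-apps index keyed by host name, both held in an in-memory cache and persisted by TimelineMetricMetadataSync. Rendered as plain Python data purely for illustration (the tuple fields follow the TimelineMetricMetadata constructor arguments positionally; the real field names are not visible in the test, so none are asserted here):

# Illustration of the two caches the test exercises, as Python literals.
# Metadata values follow the constructor arguments after the key:
# (type, <null arg>, <1L arg>, <boolean arg>).
metadata_cache = {
    ("dummy_metric1", "dummy_app1"): ("Integer", None, 1, True),
    ("dummy_metric2", "dummy_app2"): ("Integer", None, 1, True),
}
hosted_apps_cache = {
    "dummy_host1": {"dummy_app1"},
    "dummy_host2": {"dummy_app2"},
}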
[29/39] ambari git commit: AMBARI-14844. Ambari Admin: incorrect 'users' text on the Roles page (akovalenko)

Posted by nc...@apache.org.
AMBARI-14844. Ambari Admin: incorrect 'users' text on the Roles page (akovalenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1be4a2d3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1be4a2d3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1be4a2d3

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 1be4a2d3bd4899270c1c52c33fbc4dd5fb701814
Parents: 5facf9a
Author: Aleksandr Kovalenko <ak...@hortonworks.com>
Authored: Fri Jan 29 18:33:46 2016 +0200
Committer: Aleksandr Kovalenko <ak...@hortonworks.com>
Committed: Sat Jan 30 03:06:17 2016 +0200

----------------------------------------------------------------------
 .../app/scripts/controllers/clusters/UserAccessListCtrl.js       | 2 +-
 .../src/main/resources/ui/admin-web/app/scripts/i18n.config.js   | 1 +
 .../ui/admin-web/app/views/clusters/userAccessList.html          | 4 ++--
 3 files changed, 4 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1be4a2d3/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/clusters/UserAccessListCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/clusters/UserAccessListCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/clusters/UserAccessListCtrl.js
index eed71d5..17dac40 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/clusters/UserAccessListCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/clusters/UserAccessListCtrl.js
@@ -22,7 +22,7 @@ angular.module('ambariAdminConsole')
 function($scope, $location, Cluster, $modal, $rootScope, $routeParams, PermissionSaver, Alert, $translate) {
   var $t = $translate.instant;
   $scope.constants = {
-    users: $t('common.users').toLowerCase()
+    usersGroups: $t('common.usersGroups').toLowerCase()
   };
   $scope.users = [];
   $scope.usersPerPage = 10;

http://git-wip-us.apache.org/repos/asf/ambari/blob/1be4a2d3/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
index b274965..c83d627 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
@@ -74,6 +74,7 @@ angular.module('ambariAdminConsole')
       'jwt': 'JWT',
       'warning': 'Warning',
       'filterInfo': '{{showed}} of {{total}} {{term}} showing',
+      'usersGroups': 'Users/Groups',
 
       'clusterNameChangeConfirmation': {
         'title': 'Confirm Cluster Name Change',

http://git-wip-us.apache.org/repos/asf/ambari/blob/1be4a2d3/ambari-admin/src/main/resources/ui/admin-web/app/views/clusters/userAccessList.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/clusters/userAccessList.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/clusters/userAccessList.html
index 0f3da0e..8c12a25 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/clusters/userAccessList.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/clusters/userAccessList.html
@@ -73,11 +73,11 @@
     </tbody>
   </table>
   <div class="alert alert-info col-sm-12" ng-show="!users.length">
-    {{'common.alerts.nothingToDisplay' | translate: '{term: constants.users}'}}
+    {{'common.alerts.nothingToDisplay' | translate: '{term: constants.usersGroups}'}}
   </div>
   <div class="col-sm-12 table-bar">
     <div class="pull-left filtered-info">
-      <span>{{'common.filterInfo' | translate: '{showed: tableInfo.showed, total: tableInfo.total, term: constants.users}'}}</span>
+      <span>{{'common.filterInfo' | translate: '{showed: tableInfo.showed, total: tableInfo.total, term: constants.usersGroups}'}}</span>
       <span ng-show="isNotEmptyFilter">- <a href ng-click="clearFilters()">{{'common.controls.clearFilters' | translate}}</a></span>
     </div>
     <div class="pull-right left-margin">


[14/39] ambari git commit: AMBARI-14831. Upgrading ambari-server from 2.0.1 to 2.2.0 sets incorrect value for "storm.zookeeper.superACL".(vbrodetskyi)

Posted by nc...@apache.org.
AMBARI-14831. Upgrading ambari-server from 2.0.1 to 2.2.0 sets incorrect value for "storm.zookeeper.superACL".(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/195a3595
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/195a3595
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/195a3595

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 195a35953606263d2810b10a731f3b295aa29a60
Parents: e72a6b4
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Thu Jan 28 21:29:15 2016 +0200
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Thu Jan 28 21:29:15 2016 +0200

----------------------------------------------------------------------
 .../ambari/server/upgrade/UpgradeCatalog210.java     |  4 +---
 .../ambari/server/upgrade/UpgradeCatalog222.java     | 15 +++++++++++++++
 .../ambari/server/upgrade/UpgradeCatalog222Test.java |  6 ++++--
 3 files changed, 20 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/195a3595/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
index 7940e02..faf4b96 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
@@ -24,7 +24,6 @@ import com.google.gson.JsonObject;
 import com.google.gson.JsonParser;
 import com.google.inject.Inject;
 import com.google.inject.Injector;
-import com.google.inject.persist.Transactional;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
@@ -47,7 +46,6 @@ import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.StackId;
-import org.apache.ambari.server.state.kerberos.AbstractKerberosDescriptorContainer;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
 import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
@@ -1648,7 +1646,7 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
               newStormProps.put("nimbus.supervisors.users", "['{{storm_user}}']");
             }
             if (!cluster.getDesiredConfigByType("storm-site").getProperties().containsKey("storm.zookeeper.superACL")) {
-              newStormProps.put("storm.zookeeper.superACL", "sasl:{{storm_base_jaas_principal}}");
+              newStormProps.put("storm.zookeeper.superACL", "sasl:{{storm_bare_jaas_principal}}");
             }
             if (!cluster.getDesiredConfigByType("storm-site").getProperties().containsKey("ui.filter.params")) {
               newStormProps.put("ui.filter.params", "{'type': 'kerberos', 'kerberos.principal': '{{storm_ui_jaas_principal}}', 'kerberos.keytab': '{{storm_ui_keytab_path}}', 'kerberos.name.rules': 'DEFAULT'}");

http://git-wip-us.apache.org/repos/asf/ambari/blob/195a3595/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
index bcbbf41..22f6a39 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
@@ -31,6 +31,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.sql.SQLException;
+import java.util.HashMap;
 import java.util.Map;
 
 /**
@@ -97,7 +98,21 @@ public class UpgradeCatalog222 extends AbstractUpgradeCatalog {
   protected void executeDMLUpdates() throws AmbariException, SQLException {
     addNewConfigurationsFromXml();
     updateAlerts();
+    updateStormConfigs();
+  }
+
+  protected void updateStormConfigs() throws  AmbariException {
+    AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
+    Map<String, Cluster> clusterMap = getCheckedClusterMap(ambariManagementController.getClusters());
 
+    for (final Cluster cluster : clusterMap.values()) {
+      if (cluster.getDesiredConfigByType("storm-site") != null && cluster.getDesiredConfigByType("storm-site").getProperties().containsKey("storm.zookeeper.superACL")
+              && cluster.getDesiredConfigByType("storm-site").getProperties().get("storm.zookeeper.superACL").equals("sasl:{{storm_base_jaas_principal}}")) {
+        Map<String, String> newStormProps = new HashMap<String, String>();
+        newStormProps.put("storm.zookeeper.superACL", "sasl:{{storm_bare_jaas_principal}}");
+        updateConfigurationPropertiesForCluster(cluster, "storm-site", newStormProps, true, false);
+      }
+    }
   }
 
   protected void updateAlerts() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/195a3595/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
index f33a31d..d6d51a9 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
@@ -78,19 +78,21 @@ public class UpgradeCatalog222Test {
   public void testExecuteDMLUpdates() throws Exception {
     Method addNewConfigurationsFromXml = AbstractUpgradeCatalog.class.getDeclaredMethod("addNewConfigurationsFromXml");
     Method updateAlerts = UpgradeCatalog222.class.getDeclaredMethod("updateAlerts");
-
+    Method updateStormConfigs = UpgradeCatalog222.class.getDeclaredMethod("updateStormConfigs");
 
 
     UpgradeCatalog222 upgradeCatalog222 = createMockBuilder(UpgradeCatalog222.class)
             .addMockedMethod(addNewConfigurationsFromXml)
             .addMockedMethod(updateAlerts)
+            .addMockedMethod(updateStormConfigs)
             .createMock();
 
     upgradeCatalog222.addNewConfigurationsFromXml();
     expectLastCall().once();
     upgradeCatalog222.updateAlerts();
     expectLastCall().once();
-
+    upgradeCatalog222.updateStormConfigs();
+    expectLastCall().once();
 
     replay(upgradeCatalog222);
 


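The catalog change above is the usual guard-then-rewrite pattern for repairing a value an earlier upgrade wrote incorrectly: only clusters whose storm-site still carries the bad template are touched, and only that one key is rewritten. A compact pseudocode sketch of the pattern (the cluster and update helpers are stand-ins, not real Ambari server APIs):

# Pseudocode sketch of updateStormConfigs(); 'clusters' and 'update_config'
# are stand-ins for the Ambari management controller calls in the Java above.
BAD_VALUE = "sasl:{{storm_base_jaas_principal}}"
GOOD_VALUE = "sasl:{{storm_bare_jaas_principal}}"

def update_storm_configs(clusters, update_config):
    for cluster in clusters:
        storm_site = cluster.get("storm-site") or {}
        if storm_site.get("storm.zookeeper.superACL") == BAD_VALUE:
            # Rewrite only the offending property; everything else is untouched.
            update_config(cluster, "storm-site",
                          {"storm.zookeeper.superACL": GOOD_VALUE})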
[37/39] ambari git commit: AMBARI-14864. Scheduled requests get 403 error on execution. (mpapirkovskyy)

Posted by nc...@apache.org.
AMBARI-14864. Scheduled requests get 403 error on execution. (mpapirkovskyy)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d8804cfb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d8804cfb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d8804cfb

Branch: refs/heads/branch-dev-patch-upgrade
Commit: d8804cfb576681f86d52cfb9778b18f2f958b09d
Parents: bb3b509
Author: Myroslav Papirkovskyy <mp...@hortonworks.com>
Authored: Mon Feb 1 16:09:01 2016 +0200
Committer: Myroslav Papirkovskyy <mp...@hortonworks.com>
Committed: Mon Feb 1 16:45:14 2016 +0200

----------------------------------------------------------------------
 .../server/security/authorization/AmbariAuthorizationFilter.java  | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d8804cfb/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilter.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilter.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilter.java
index 82c03e4..e2a28d0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilter.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilter.java
@@ -122,7 +122,8 @@ public class AmbariAuthorizationFilter implements Filter {
       }
     }
 
-    if (authentication == null || !authentication.isAuthenticated()) {
+    if (authentication == null || authentication instanceof AnonymousAuthenticationToken ||
+        !authentication.isAuthenticated()) {
       String token = httpRequest.getHeader(INTERNAL_TOKEN_HEADER);
       if (token != null) {
         context.setAuthentication(new InternalAuthenticationToken(token));


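The two-line change above matters because Spring Security represents an unauthenticated caller as an AnonymousAuthenticationToken rather than a null authentication, so scheduled internal requests could skip the internal-token branch and fail with the 403 described in the summary. A condensed view of the decision after the fix (paraphrased from the Java; the helper names here are invented, not the real filter API):

# Paraphrase of the filter logic after the fix; helper names are invented.
def resolve_authentication(authentication, headers, internal_token_header,
                           set_internal_authentication):
    if (authentication is None
            or authentication.is_anonymous()      # new: anonymous counts as unauthenticated
            or not authentication.is_authenticated()):
        token = headers.get(internal_token_header)
        if token is not None:
            set_internal_authentication(token)     # trust the internal token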
[36/39] ambari git commit: AMBARI-14862. [Ambari tarballs] Package ambari-agent according to ambari-server style (make common definition for deb, rpm) (aonishuk)

Posted by nc...@apache.org.
AMBARI-14862. [Ambari tarballs] Package ambari-agent according to ambari-server style (make common definition for deb, rpm) (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bb3b5096
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bb3b5096
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bb3b5096

Branch: refs/heads/branch-dev-patch-upgrade
Commit: bb3b5096136f0ac652aabea86b5f378659e597c8
Parents: 2fd458e
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Mon Feb 1 15:57:40 2016 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Mon Feb 1 15:57:40 2016 +0200

----------------------------------------------------------------------
 ambari-agent/pom.xml                      | 501 +------------------------
 ambari-agent/src/packages/tarball/all.xml | 168 ++++++++-
 2 files changed, 182 insertions(+), 487 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/bb3b5096/ambari-agent/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-agent/pom.xml b/ambari-agent/pom.xml
index dbbb5ad..918065f 100644
--- a/ambari-agent/pom.xml
+++ b/ambari-agent/pom.xml
@@ -50,10 +50,10 @@
     <ambari.server.module>../ambari-server</ambari.server.module>
     <target.cache.dir>${project.build.directory}/cache/</target.cache.dir>
     <resource.keeper.script>${ambari.server.module}/src/main/python/ambari_server/resourceFilesKeeper.py</resource.keeper.script>
-    <init.d.dir>/etc/rc.d/init.d</init.d.dir>
     <resourceManagementSrcLocation>${project.basedir}/../ambari-common/src/main/python/resource_management</resourceManagementSrcLocation>
     <resourcesFolder>${ambari.server.module}/src/main/resources</resourcesFolder>
     <customActionsLocation>${target.cache.dir}/custom_actions</customActionsLocation>
+    <empty.dir>src/packages/tarball</empty.dir> <!-- any directory in project with not very big amount of files (not to waste-load them) -->
   </properties>
   <build>
     <plugins>
@@ -137,26 +137,6 @@
             </goals>
           </execution>
           <execution>
-            <!-- TODO: Looks like section is unused, maybe remove? -->
-            <configuration>
-              <executable>${executable.python}</executable>
-              <workingDirectory>target${dirsep}ambari-agent-${project.version}</workingDirectory>
-              <arguments>
-                <argument>${project.basedir}${dirsep}src${dirsep}main${dirsep}python${dirsep}setup.py</argument>
-                <argument>clean</argument>
-                <argument>bdist_dumb</argument>
-              </arguments>
-              <environmentVariables>
-                <PYTHONPATH>target${dirsep}ambari-agent-${project.version}${pathsep}$PYTHONPATH</PYTHONPATH>
-              </environmentVariables>
-            </configuration>
-            <id>python-package</id>
-            <phase>package</phase>
-            <goals>
-              <goal>exec</goal>
-            </goals>
-          </execution>
-          <execution>
             <configuration>
               <executable>${executable.python}</executable>
               <workingDirectory>${basedir}</workingDirectory>
@@ -179,7 +159,7 @@
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>rpm-maven-plugin</artifactId>
-        <version>2.0.1</version>
+        <version>2.1.4</version>
         <executions>
           <execution>
             <!-- unbinds rpm creation from maven lifecycle -->
@@ -220,266 +200,43 @@
           <autoRequires>false</autoRequires>
           <mappings>
             <mapping>
-              <directory>${agent.install.dir}</directory>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>${project.build.directory}${dirsep}${project.artifactId}-${project.version}${dirsep}ambari_agent</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-agent/</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <directoryIncluded>false</directoryIncluded>
-              <sources>
-                <source>
-                  <location>../ambari-common/src/main/unix/ambari-python-wrap</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-agent/</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <directoryIncluded>false</directoryIncluded>
-              <sources>
-                <source>
-                  <location>conf/unix/ambari-sudo.sh</location>
-                </source>
-              </sources>
-            </mapping>         
-            <mapping>
-              <directory>${ambari_commons.install.dir}</directory>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>${project.basedir}/../ambari-common/src/main/python/ambari_commons</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>${resource_management.install.dir}</directory>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>${resourceManagementSrcLocation}</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>${jinja.install.dir}</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2</location>
-                  <excludes>
-                    <exclude>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2/testsuite</exclude>
-                  </excludes>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>${simplejson.install.dir}</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>${project.basedir}/../ambari-common/src/main/python/ambari_simplejson/</location>
-                </source>
-              </sources>
-            </mapping>
-             <mapping>
-              <directory>${lib.dir}/examples</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>src/examples</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/etc/ambari-agent/conf</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>conf/unix/ambari-agent.ini</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/etc/ambari-agent/conf</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>conf/unix/logging.conf.sample</location>
-                </source>
-              </sources>
-            </mapping>              
-            <mapping>
-              <directory>/usr/sbin</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <directoryIncluded>false</directoryIncluded>
-              <sources>
-                <source>
-                  <location>conf/unix/ambari-agent</location>
-                  <filter>true</filter>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-agent</directory>
-              <configuration>true</configuration>
-              <filemode>700</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>conf/unix/ambari-env.sh</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-agent</directory>
-              <filemode>700</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>conf/unix/install-helper.sh</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-agent</directory>
-              <filemode>700</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>conf/unix/upgrade_agent_configs.py</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>${package.pid.dir}</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/${project.artifactId}/data</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/${project.artifactId}/tmp</directory>
-              <filemode>777</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/${project.artifactId}/keys</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-            </mapping>
-            <mapping>
-              <directory>${package.log.dir}</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-            </mapping>
-            <mapping>
-              <directory>/etc/init</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>etc/init/ambari-agent.conf</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>${init.d.dir}</directory>
-              <filemode>755</filemode>
+              <directory>/etc</directory>
               <username>root</username>
               <groupname>root</groupname>
+              <directoryIncluded>false</directoryIncluded> <!-- avoid managing /etc/init.d -->
               <sources>
                 <source>
-                  <location>etc/init.d/ambari-agent</location>
+                  <location>${project.build.directory}${dirsep}${project.artifactId}-${project.version}/etc</location>
                 </source>
               </sources>
             </mapping>
-            <mapping>
-              <directory>/var/lib/${project.artifactId}/data</directory>
-              <filemode>755</filemode>
+			<mapping>
+              <directory>/usr</directory>
               <username>root</username>
               <groupname>root</groupname>
               <sources>
                 <source>
-                  <location>../version</location>
-                  <filter>true</filter>
+                  <location>${project.build.directory}${dirsep}${project.artifactId}-${project.version}/usr</location>
                 </source>
               </sources>
             </mapping>
             <mapping>
-              <directory>/var/lib/ambari-agent/cache</directory>
-              <filemode>755</filemode>
+              <directory>/var</directory>
               <username>root</username>
               <groupname>root</groupname>
               <sources>
                 <source>
-                  <location>${target.cache.dir}</location>
-                  <excludes>
-                    <exclude>custom_actions/scripts/*</exclude>
-                  </excludes>
+                  <location>${project.build.directory}${dirsep}${project.artifactId}-${project.version}/var</location>
                 </source>
               </sources>
             </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-agent/cache/custom_actions</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>${customActionsLocation}</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-agent/lib</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-            </mapping>
           </mappings>
         </configuration>
       </plugin>
       <plugin>
         <groupId>org.vafer</groupId>
         <artifactId>jdeb</artifactId>
-        <version>1.0.1</version>
+        <version>1.4</version>
         <executions>
           <execution>
             <phase>none</phase>
@@ -491,234 +248,14 @@
         <configuration>
           <controlDir>${basedir}/src/main/package/deb/control</controlDir>
           <deb>${basedir}/target/${project.artifactId}_${package-version}-${package-release}.deb</deb>
+          <skip>false</skip>
+          <skipPOMs>false</skipPOMs>
           <dataSet>
             <data>
-              <src>${project.build.directory}/${project.artifactId}-${project.version}/ambari_agent</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>${agent.install.dir}</prefix>
-                <user>root</user>
-                <group>root</group>
-              </mapper>
-            </data>
-            <data>
-              <src>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2</src>
-              <excludes>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2/testsuite</excludes>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>${jinja.install.dir}</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>${project.basedir}/../ambari-common/src/main/python/ambari_simplejson</src>
-              <type>directory</type>
+              <src>${project.build.directory}${dirsep}${project.artifactId}-${project.version}.tar.gz</src>
+              <type>archive</type>
               <mapper>
                 <type>perm</type>
-                <prefix>${simplejson.install.dir}</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>src/examples</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>${lib.dir}/examples</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/ambari-agent.ini</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/etc/ambari-agent/conf</prefix>
-                  <user>root</user>
-                  <group>root</group>
-                  <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/logging.conf.sample</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/etc/ambari-agent/conf</prefix>
-                  <user>root</user>
-                  <group>root</group>
-                  <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>${basedir}/target/src/ambari-agent</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/usr/sbin</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/ambari-env.sh</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-agent</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>700</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/install-helper.sh</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-agent</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>700</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/upgrade_agent_configs.py</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-agent</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>700</filemode>
-              </mapper>
-            </data>
-            <data>
-              <type>template</type>
-              <paths>
-                <path>${package.pid.dir}</path>
-                <path>/var/lib/${project.artifactId}/data</path>
-                <path>/var/lib/${project.artifactId}/tmp</path>
-                <path>/var/lib/${project.artifactId}/keys</path>
-                <path>${package.log.dir}</path>
-                <path>/var/lib/${project.artifactId}/lib</path>
-              </paths>
-              <mapper>
-                <type>perm</type>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>etc/init.d/ambari-agent</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/etc/init.d</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>etc/init/ambari-agent.conf</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/etc/init</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>${basedir}/target/src/version</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/${project.artifactId}/data</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>../ambari-common/src/main/unix/ambari-python-wrap</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-agent</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/ambari-sudo.sh</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-agent</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>${target.cache.dir}</src>
-              <type>directory</type>
-              <excludes>custom_actions/scripts/*</excludes>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-agent/cache</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>${customActionsLocation}</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-agent/cache/custom_actions</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>
-                ${project.basedir}/../ambari-common/src/main/python/ambari_commons
-              </src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>${ambari_commons.install.dir}</prefix>
-                <filemode>755</filemode>
-                <user>root</user>
-                <group>root</group>
-              </mapper>
-            </data>
-            <data>
-              <src>
-                ${resourceManagementSrcLocation}
-              </src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>${resource_management.install.dir}</prefix>
-                <filemode>755</filemode>
                 <user>root</user>
                 <group>root</group>
               </mapper>
@@ -773,7 +310,7 @@
           </execution>
            <execution>
             <id>copy-resources-filter</id>
-            <phase>prepare-package</phase>
+            <phase>generate-resources</phase>
             <goals>
               <goal>copy-resources</goal>
             </goals>
@@ -963,12 +500,6 @@
       </build>
     </profile>
     <profile>
-      <id>suse11</id>
-      <properties>
-        <init.d.dir>/etc/init.d</init.d.dir>
-      </properties>
-    </profile>
-    <profile>
       <id>pluggable-stack-definition</id>
       <activation>
         <activeByDefault>false</activeByDefault>

http://git-wip-us.apache.org/repos/asf/ambari/blob/bb3b5096/ambari-agent/src/packages/tarball/all.xml
----------------------------------------------------------------------
diff --git a/ambari-agent/src/packages/tarball/all.xml b/ambari-agent/src/packages/tarball/all.xml
index 0e4f34b..d4ed2ff 100644
--- a/ambari-agent/src/packages/tarball/all.xml
+++ b/ambari-agent/src/packages/tarball/all.xml
@@ -23,12 +23,176 @@
   -->
   <formats>
     <format>dir</format>
+    <format>tar.gz</format>
   </formats>
   <includeBaseDirectory>false</includeBaseDirectory>
+  <!-- File sets. Syntax:
+	  <fileSets>
+	    <fileSet>
+	      <useDefaultExcludes/>
+	      <outputDirectory/>
+	      <includes/>
+	      <excludes/>
+	      <fileMode/>
+	      <directoryMode/>
+	      <directory/>
+	      <lineEnding/>
+	      <filtered/>
+	    </fileSet>
+	  </fileSets>
+  -->
   <fileSets>
     <fileSet>
-      <directory>src/main/python</directory>
-      <outputDirectory>/</outputDirectory>
+      <directoryMode>755</directoryMode>
+      <directory>src/main/python/ambari_agent</directory>
+      <outputDirectory>${agent.install.dir}</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${project.basedir}/../ambari-common/src/main/python/ambari_commons</directory>
+      <outputDirectory>${ambari_commons.install.dir}</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${resourceManagementSrcLocation}</directory>
+      <outputDirectory>${resource_management.install.dir}</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2</directory>
+      <outputDirectory>${jinja.install.dir}</outputDirectory>
+      <excludes>
+      	<exclude>**/testsuite/**</exclude>
+      </excludes>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${project.basedir}/../ambari-common/src/main/python/ambari_simplejson</directory>
+      <outputDirectory>${simplejson.install.dir}</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>src/examples</directory>
+      <outputDirectory>${lib.dir}/examples</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>${package.pid.dir}</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <directoryMode>777</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>/var/lib/${project.artifactId}/tmp</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>/var/lib/${project.artifactId}/keys</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>${package.log.dir}</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>/var/lib/ambari-agent/lib</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${target.cache.dir}</directory>
+      <outputDirectory>/var/lib/ambari-agent/cache</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${pluggableStackDefinitionOutput}/custom_actions</directory>
+      <outputDirectory>/var/lib/ambari-agent/cache/custom_actions</outputDirectory>
     </fileSet>
   </fileSets>
+  <!-- Single files. Syntax:
+	  <files>
+	    <file>
+	      <source/>
+	      <outputDirectory/>
+	      <destName/>
+	      <fileMode/>
+	      <lineEnding/>
+	      <filtered/>
+	    </file>
+	  </files>
+  -->
+  <files>
+    <file>
+      <fileMode>755</fileMode>
+      <source>../ambari-common/src/main/unix/ambari-python-wrap</source>
+      <outputDirectory>/var/lib/ambari-agent/</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>conf/unix/ambari-sudo.sh</source>
+      <outputDirectory>/var/lib/ambari-agent/</outputDirectory>
+    </file>
+    <file>
+      <fileMode>644</fileMode>
+      <source>conf/unix/ambari-agent.ini</source>
+      <outputDirectory>/etc/ambari-agent/conf</outputDirectory>
+    </file>
+    <file>
+      <fileMode>644</fileMode>
+      <source>conf/unix/logging.conf.sample</source>
+      <outputDirectory>/etc/ambari-agent/conf</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>conf/unix/ambari-agent</source>
+      <outputDirectory>/usr/sbin</outputDirectory>
+    </file>
+    <file>
+      <fileMode>700</fileMode>
+      <source>conf/unix/ambari-env.sh</source>
+      <outputDirectory>/var/lib/ambari-agent</outputDirectory>
+    </file>
+    <file>
+      <fileMode>700</fileMode>
+      <source>conf/unix/install-helper.sh</source>
+      <outputDirectory>/var/lib/ambari-agent</outputDirectory>
+    </file>
+    <file>
+      <fileMode>700</fileMode>
+      <source>conf/unix/upgrade_agent_configs.py</source>
+      <outputDirectory>/var/lib/ambari-agent</outputDirectory>
+    </file>
+    <file>
+      <fileMode>644</fileMode>
+      <source>etc/init/ambari-agent.conf</source>
+      <outputDirectory>/etc/init</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>etc/init.d/ambari-agent</source>
+      <outputDirectory>/etc/init.d/ambari-agent</outputDirectory>
+    </file>
+    <file>
+      <fileMode>644</fileMode>
+      <source>${basedir}/target/src/version</source>
+      <outputDirectory>/var/lib/${project.artifactId}/data</outputDirectory>
+    </file>
+  </files>
 </assembly>
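
For reference, both the rpm and deb packaging now consume the single tar.gz produced by this assembly, so the installed layout and file modes are defined in one place. Below is a minimal sketch of how that layout could be spot-checked with Python's tarfile module; the archive path and the specific entries and modes being verified are assumptions read off the file sets above, not something shipped with the commit.

# Illustrative only: spot-check a few entries of the assembled agent tarball.
# The archive path below is hypothetical; expected modes mirror the assembly.
import tarfile

EXPECTED_MODES = {
    "etc/ambari-agent/conf/ambari-agent.ini": 0o644,
    "usr/sbin/ambari-agent": 0o755,
    "var/lib/ambari-agent/install-helper.sh": 0o700,
}

def check_tarball(path):
    with tarfile.open(path, "r:gz") as tar:
        members = {m.name.lstrip("./"): m for m in tar.getmembers()}
        for name, mode in EXPECTED_MODES.items():
            member = members.get(name)
            if member is None:
                print("missing: %s" % name)
            elif (member.mode & 0o777) != mode:
                print("wrong mode for %s: %o" % (name, member.mode & 0o777))
            else:
                print("ok: %s" % name)

# check_tarball("target/ambari-agent-2.4.0.0.0.tar.gz")  # path is an assumption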


[22/39] ambari git commit: AMBARI-14842. ambari agent upstart support (aonishuk)

Posted by nc...@apache.org.
AMBARI-14842. ambari agent upstart support (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6d83195a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6d83195a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6d83195a

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 6d83195a72a1dc6435ebf29fd470c512a49b7a69
Parents: a2f436a
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Fri Jan 29 13:41:28 2016 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Fri Jan 29 13:41:28 2016 +0200

----------------------------------------------------------------------
 ambari-agent/conf/unix/ambari-agent     | 13 +++++++++++
 ambari-agent/etc/init/ambari-agent.conf | 34 ++++++++++++++++++++++++++++
 ambari-agent/pom.xml                    | 22 ++++++++++++++++++
 3 files changed, 69 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6d83195a/ambari-agent/conf/unix/ambari-agent
----------------------------------------------------------------------
diff --git a/ambari-agent/conf/unix/ambari-agent b/ambari-agent/conf/unix/ambari-agent
index 4f9e1e5..96ef12b 100755
--- a/ambari-agent/conf/unix/ambari-agent
+++ b/ambari-agent/conf/unix/ambari-agent
@@ -157,7 +157,14 @@ case "$1" in
         change_files_permissions
         
         echo "Starting ambari-agent"
+        
+        if [ "$AMBARI_AGENT_RUN_IN_FOREGROUND" == true ] ; then
+          $PYTHON $AMBARI_AGENT_PY_SCRIPT "$@" > $OUTFILE 2>&1 
+          exit $?
+        fi
+        
         nohup $PYTHON $AMBARI_AGENT_PY_SCRIPT "$@" > $OUTFILE 2>&1 &
+        
         sleep 2
         PID=$!
         echo "Verifying $AMBARI_AGENT process status..."
@@ -219,6 +226,12 @@ case "$1" in
             echo "Stopping $AMBARI_AGENT"
             change_files_permissions
             $PYTHON $AGENT_SCRIPT stop
+            
+            status ambari-agent 2>/dev/null | grep start 1>/dev/null
+            if [ "$?" -eq 0 ] ; then
+              echo "Stopping $AMBARI_AGENT upstart job"
+              stop ambari-agent > /dev/null
+            fi
           fi
           echo "Removing PID file at $PIDFILE"
           ambari-sudo.sh rm -f $PIDFILE

http://git-wip-us.apache.org/repos/asf/ambari/blob/6d83195a/ambari-agent/etc/init/ambari-agent.conf
----------------------------------------------------------------------
diff --git a/ambari-agent/etc/init/ambari-agent.conf b/ambari-agent/etc/init/ambari-agent.conf
new file mode 100644
index 0000000..021eb3b
--- /dev/null
+++ b/ambari-agent/etc/init/ambari-agent.conf
@@ -0,0 +1,34 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific
+
+#ambari-agent
+description     "ambari agent"
+
+stop on runlevel [06]
+
+env PIDFILE=/var/run/ambari-agent/ambari-agent.pid
+
+kill signal SIGKILL
+respawn
+
+script
+   . /etc/environment
+
+   export AMBARI_AGENT_RUN_IN_FOREGROUND=true
+   exec /etc/init.d/ambari-agent start
+end script
+
+post-stop script
+  rm -f $PIDFILE
+end script
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/6d83195a/ambari-agent/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-agent/pom.xml b/ambari-agent/pom.xml
index 346ac07..dbbb5ad 100644
--- a/ambari-agent/pom.xml
+++ b/ambari-agent/pom.xml
@@ -409,6 +409,17 @@
               <groupname>root</groupname>
             </mapping>
             <mapping>
+              <directory>/etc/init</directory>
+              <filemode>755</filemode>
+              <username>root</username>
+              <groupname>root</groupname>
+              <sources>
+                <source>
+                  <location>etc/init/ambari-agent.conf</location>
+                </source>
+              </sources>
+            </mapping>
+            <mapping>
               <directory>${init.d.dir}</directory>
               <filemode>755</filemode>
               <username>root</username>
@@ -620,6 +631,17 @@
               </mapper>
             </data>
             <data>
+              <src>etc/init/ambari-agent.conf</src>
+              <type>file</type>
+              <mapper>
+                <type>perm</type>
+                <prefix>/etc/init</prefix>
+                <user>root</user>
+                <group>root</group>
+                <filemode>755</filemode>
+              </mapper>
+            </data>
+            <data>
               <src>${basedir}/target/src/version</src>
               <type>file</type>
               <mapper>
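
The stop path added to conf/unix/ambari-agent only calls the upstart stop command when "status ambari-agent" reports the job as started, so hosts without upstart (or without this job installed) keep the plain init.d behaviour. A rough Python equivalent of that guard, shown purely for illustration (the status/stop command names come from the script above; everything else is assumed):

# Illustrative sketch of the guard in conf/unix/ambari-agent: stop the
# upstart job only if "status ambari-agent" reports it as started.
import subprocess

def stop_upstart_job(job="ambari-agent"):
    try:
        out = subprocess.check_output(["status", job], stderr=subprocess.STDOUT)
    except (OSError, subprocess.CalledProcessError):
        return False  # no upstart on this host, or job unknown -- nothing to do
    if b"start" in out:
        subprocess.call(["stop", job])
        return True
    return False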


[15/39] ambari git commit: AMBARI-14836. RU/EU from HDP 2.3 to 2.4 fails on HDFS Balancer command if NN HA since config dfs.namenode.rpc-address needs to be deleted (alejandro)

Posted by nc...@apache.org.
AMBARI-14836. RU/EU from HDP 2.3 to 2.4 fails on HDFS Balancer command if NN HA since config dfs.namenode.rpc-address needs to be deleted (alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c9e51380
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c9e51380
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c9e51380

Branch: refs/heads/branch-dev-patch-upgrade
Commit: c9e51380bdb155dcb4f4f1885dfa739c6c6a6445
Parents: 195a359
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Thu Jan 28 12:54:34 2016 -0800
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Thu Jan 28 15:00:11 2016 -0800

----------------------------------------------------------------------
 .../stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml   |  5 +++++
 .../resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml    |  4 ++++
 .../resources/stacks/HDP/2.4/upgrades/config-upgrade.xml | 11 +++++++++++
 3 files changed, 20 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c9e51380/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
index 3d65252..6154634 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
@@ -268,6 +268,11 @@
       <skippable>true</skippable>  <!-- May fix configuration problems manually -->
       <supports-auto-skip-failure>false</supports-auto-skip-failure>
 
+      <!-- HDFS -->
+      <execute-stage service="HDFS" component="NAMENODE" title="Apply config changes for NameNode">
+        <task xsi:type="configure" id="hdp_2_4_0_0_namenode_ha_adjustments"/>
+      </execute-stage>
+
       <!-- YARN -->
       <execute-stage service="YARN" component="RESOURCEMANAGER" title="Calculating Yarn Properties for Spark">
         <task xsi:type="server_action" summary="Calculating Yarn Properties for Spark Shuffle" class="org.apache.ambari.server.serveraction.upgrades.SparkShufflePropertyConfig" />

http://git-wip-us.apache.org/repos/asf/ambari/blob/c9e51380/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
index f248293..33a9b16 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
@@ -436,6 +436,10 @@
       <component name="NAMENODE">
         <pre-downgrade /> <!--  no-op to prevent config changes on downgrade -->
 
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_4_0_0_namenode_ha_adjustments"/>
+        </pre-upgrade>
+
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/c9e51380/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
index 0145f3b..5949486 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
@@ -20,6 +20,17 @@
 
   <services>
 
+    <service name="HDFS">
+      <component name="NAMENODE">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_4_0_0_namenode_ha_adjustments">
+            <type>hdfs-site</type>
+            <transfer operation="delete" delete-key="dfs.namenode.rpc-address" if-type="hdfs-site" if-key="dfs.nameservices" if-key-state="present"/>
+          </definition>
+        </changes>
+      </component>
+    </service>
+
     <service name="HBASE">
       <component name="HBASE_MASTER">
         <changes>
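
The transfer element added for hdp_2_4_0_0_namenode_ha_adjustments is conditional: dfs.namenode.rpc-address is deleted from hdfs-site only when dfs.nameservices is present, i.e. only on NameNode HA clusters, which is exactly the case where the leftover key breaks the HDFS Balancer step. A minimal sketch of that conditional-delete rule over a plain dict (illustrative only; the real work is done by the server-side configure task, and the sample values are made up):

# Illustrative: delete a key only when another key is present in the same
# config type, mirroring if-type/if-key/if-key-state="present" above.
def apply_conditional_delete(hdfs_site,
                             delete_key="dfs.namenode.rpc-address",
                             if_key="dfs.nameservices"):
    if if_key in hdfs_site:          # HA is configured
        hdfs_site.pop(delete_key, None)
    return hdfs_site

# Hypothetical HA config: the rpc-address key is dropped before the Balancer runs.
ha_site = {"dfs.nameservices": "mycluster",
           "dfs.namenode.rpc-address": "nn1.example.com:8020"}
print(apply_conditional_delete(ha_site))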


[31/39] ambari git commit: AMBARI-14838. UI - cannot "Pause Downgrade" during RU (Joe Wang via rzang)

Posted by nc...@apache.org.
AMBARI-14838. UI - cannot "Pause Downgrade" during RU (Joe Wang via rzang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7444981f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7444981f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7444981f

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 7444981ff63694c8851064c4f7e346a480855de5
Parents: 528e8ea
Author: Richard Zang <rz...@apache.org>
Authored: Sat Jan 30 12:27:20 2016 +0800
Committer: Richard Zang <rz...@apache.org>
Committed: Sat Jan 30 12:27:20 2016 +0800

----------------------------------------------------------------------
 .../main/admin/stack_and_upgrade_controller.js  | 22 ++++++++++++++++++--
 ambari-web/app/messages.js                      |  2 ++
 .../stack_upgrade/stack_upgrade_wizard.hbs      |  8 +++----
 ambari-web/app/utils/ajax/ajax.js               |  3 +++
 4 files changed, 29 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7444981f/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
index f80c5ea..16a58d6 100644
--- a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
+++ b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
@@ -553,13 +553,15 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
    * abort upgrade (in order to start Downgrade)
    */
   abortUpgrade: function () {
+    var errorCallback = this.get('isDowngrade') ? 'abortDowngradeErrorCallback' : 'abortUpgradeErrorCallback';
     return App.ajax.send({
       name: 'admin.upgrade.abort',
       sender: this,
       data: {
-        upgradeId: this.get('upgradeId')
+        upgradeId: this.get('upgradeId'),
+        isDowngrade: this.get('isDowngrade')
       },
-      error: 'abortUpgradeErrorCallback'
+      error: errorCallback
     });
   },
 
@@ -579,6 +581,22 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
     App.showAlertPopup(header, body);
   },
 
+  /**
+   * error callback of <code>abortDowngrade()</code>
+   * @param {object} data
+   */
+  abortDowngradeErrorCallback: function (data) {
+    var header = Em.I18n.t('admin.stackDowngrade.state.paused.fail.header');
+    var body = Em.I18n.t('admin.stackDowngrade.state.paused.fail.body');
+    if(data && data.responseText){
+      try {
+        var json = $.parseJSON(data.responseText);
+        body = body + ' ' + json.message;
+      } catch (err) {}
+    }
+    App.showAlertPopup(header, body);
+  },
+
   retryUpgrade: function () {
     this.setProperties({
       requestInProgress: true,

http://git-wip-us.apache.org/repos/asf/ambari/blob/7444981f/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index d6ecbf3..916ca2a 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -1497,6 +1497,8 @@ Em.I18n.translations = {
   'admin.stackVersions.version.downgrade.suspended': "Downgrade: Paused",
   'admin.stackUpgrade.state.paused.fail.header': "Pause Upgrade failed",
   'admin.stackUpgrade.state.paused.fail.body': "Upgrade could not be paused. Try again later.",
+  'admin.stackDowngrade.state.paused.fail.header': "Pause Downgrade failed",
+  'admin.stackDowngrade.state.paused.fail.body': "Downgrade could not be paused. Try again later.",
 
   'admin.stackVersions.version.upgrade.upgradeOptions.header': "Upgrade Options",
   'admin.stackVersions.version.upgrade.upgradeOptions.bodyMsg.version': "You are about to perform an upgrade to <b>{0}</b>.",

http://git-wip-us.apache.org/repos/asf/ambari/blob/7444981f/ambari-web/app/templates/main/admin/stack_upgrade/stack_upgrade_wizard.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/stack_upgrade/stack_upgrade_wizard.hbs b/ambari-web/app/templates/main/admin/stack_upgrade/stack_upgrade_wizard.hbs
index a13e2c2..02022cf 100644
--- a/ambari-web/app/templates/main/admin/stack_upgrade/stack_upgrade_wizard.hbs
+++ b/ambari-web/app/templates/main/admin/stack_upgrade/stack_upgrade_wizard.hbs
@@ -81,7 +81,7 @@
                     <button class="btn btn-danger" {{bindAttr disabled="controller.requestInProgress"}} {{action confirmDowngrade view.failedItem target="controller"}}>{{t common.downgrade}}</button>
                   {{/if}}
                   {{#if isDowngrade}}
-                    <button class="btn" {{action closeWizard target="view.parentView"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
+                    <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
                   {{else}}
                     <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseUpgrade}}</button>
                   {{/if}}
@@ -108,7 +108,7 @@
                 <button class="btn btn-danger" {{bindAttr disabled="controller.requestInProgress"}} {{action confirmDowngrade view.manualItem target="controller"}}>{{t common.downgrade}}</button>
               {{/if}}
               {{#if isDowngrade}}
-                <button class="btn" {{action closeWizard target="view.parentView"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
+                <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
               {{else}}
                 <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseUpgrade}}</button>
               {{/if}}
@@ -189,7 +189,7 @@
                 <button class="btn btn-danger" {{bindAttr disabled="controller.requestInProgress"}} {{action confirmDowngrade view.manualItem target="controller"}}>{{t common.downgrade}}</button>
               {{/if}}
               {{#if isDowngrade}}
-                <button class="btn" {{action closeWizard target="view.parentView"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
+                <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
               {{else}}
                 <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseUpgrade}}</button>
               {{/if}}
@@ -252,7 +252,7 @@
                   <button class="btn btn-danger" {{bindAttr disabled="controller.requestInProgress"}} {{action confirmDowngrade view.manualItem target="controller"}}>{{t common.downgrade}}</button>
               {{/if}}
               {{#if isDowngrade}}
-                  <button class="btn" {{action closeWizard target="view.parentView"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
+                  <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
               {{else}}
                   <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseUpgrade}}</button>
               {{/if}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/7444981f/ambari-web/app/utils/ajax/ajax.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/ajax/ajax.js b/ambari-web/app/utils/ajax/ajax.js
index 4d8d195..4872f50 100644
--- a/ambari-web/app/utils/ajax/ajax.js
+++ b/ambari-web/app/utils/ajax/ajax.js
@@ -1666,6 +1666,9 @@ var urls = {
     'format': function (data) {
       return {
         data: JSON.stringify({
+          "RequestInfo": {
+            "downgrade": data.isDowngrade
+          },
           "Upgrade": {
             "request_status": "ABORTED"
           }
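
With this change the abort call carries a downgrade flag in RequestInfo, so the server can tell a paused downgrade from a paused upgrade, and the UI picks the matching error callback and message. Purely for illustration, the payload built by the format function above could be sent from Python as sketched below; the REST endpoint path, cluster name and credentials are assumptions, not taken from this commit.

# Illustrative only: PUT the same body that ajax.js builds for
# 'admin.upgrade.abort'. Endpoint, cluster name and upgrade id are assumed.
import json
import requests  # third-party library, assumed to be available

def pause_upgrade(base_url, cluster, upgrade_id, is_downgrade, auth):
    payload = {
        "RequestInfo": {"downgrade": is_downgrade},
        "Upgrade": {"request_status": "ABORTED"},
    }
    url = "%s/api/v1/clusters/%s/upgrades/%s" % (base_url, cluster, upgrade_id)
    return requests.put(url, data=json.dumps(payload), auth=auth,
                        headers={"X-Requested-By": "ambari"})

# pause_upgrade("http://ambari.example.com:8080", "c1", 42, True, ("admin", "admin"))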



[34/39] ambari git commit: AMBARI-378. Missing values in “Advanced hive-site” section. (vbrodetskyi)

Posted by nc...@apache.org.
AMBARI-378. Missing values in “Advanced hive-site” section. (vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/60db82eb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/60db82eb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/60db82eb

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 60db82ebd345c033a2983f54c973c8f15767705c
Parents: 40a0ef6
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Mon Feb 1 12:56:52 2016 +0200
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Mon Feb 1 12:56:52 2016 +0200

----------------------------------------------------------------------
 .../stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml       | 2 --
 1 file changed, 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/60db82eb/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
index 88c0291..35db348 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
@@ -1795,7 +1795,6 @@ limitations under the License.
   </property>
   <property>
     <name>hive.server2.authentication.pam.services</name>
-    <value></value>
     <property-type>DONT_ADD_ON_UPGRADE</property-type>
     <depends-on>
       <property>
@@ -1806,7 +1805,6 @@ limitations under the License.
   </property>
   <property>
     <name>hive.server2.custom.authentication.class</name>
-    <value></value>
     <property-type>DONT_ADD_ON_UPGRADE</property-type>
     <depends-on>
       <property>


[16/39] ambari git commit: AMBARI-14822 Combo Search: Create auto suggest for Host attribute filters (Joe Wang via rzang)

Posted by nc...@apache.org.
AMBARI-14822 Combo Search: Create auto suggest for Host attribute filters (Joe Wang via rzang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5e09053d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5e09053d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5e09053d

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 5e09053deb0700b09ced4f8f12902ff2ea60154a
Parents: c9e5138
Author: Richard Zang <rz...@apache.org>
Authored: Fri Jan 29 08:37:44 2016 +0800
Committer: Richard Zang <rz...@apache.org>
Committed: Fri Jan 29 08:37:44 2016 +0800

----------------------------------------------------------------------
 .../main/admin/stack_and_upgrade_controller.js  | 22 ++++++++++++++++++--
 ambari-web/app/messages.js                      |  2 ++
 .../stack_upgrade/stack_upgrade_wizard.hbs      |  8 +++----
 ambari-web/app/utils/ajax/ajax.js               |  3 +++
 4 files changed, 29 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/5e09053d/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
index f80c5ea..16a58d6 100644
--- a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
+++ b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
@@ -553,13 +553,15 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
    * abort upgrade (in order to start Downgrade)
    */
   abortUpgrade: function () {
+    var errorCallback = this.get('isDowngrade') ? 'abortDowngradeErrorCallback' : 'abortUpgradeErrorCallback';
     return App.ajax.send({
       name: 'admin.upgrade.abort',
       sender: this,
       data: {
-        upgradeId: this.get('upgradeId')
+        upgradeId: this.get('upgradeId'),
+        isDowngrade: this.get('isDowngrade')
       },
-      error: 'abortUpgradeErrorCallback'
+      error: errorCallback
     });
   },
 
@@ -579,6 +581,22 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
     App.showAlertPopup(header, body);
   },
 
+  /**
+   * error callback of <code>abortDowngrade()</code>
+   * @param {object} data
+   */
+  abortDowngradeErrorCallback: function (data) {
+    var header = Em.I18n.t('admin.stackDowngrade.state.paused.fail.header');
+    var body = Em.I18n.t('admin.stackDowngrade.state.paused.fail.body');
+    if(data && data.responseText){
+      try {
+        var json = $.parseJSON(data.responseText);
+        body = body + ' ' + json.message;
+      } catch (err) {}
+    }
+    App.showAlertPopup(header, body);
+  },
+
   retryUpgrade: function () {
     this.setProperties({
       requestInProgress: true,

http://git-wip-us.apache.org/repos/asf/ambari/blob/5e09053d/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index d6ecbf3..916ca2a 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -1497,6 +1497,8 @@ Em.I18n.translations = {
   'admin.stackVersions.version.downgrade.suspended': "Downgrade: Paused",
   'admin.stackUpgrade.state.paused.fail.header': "Pause Upgrade failed",
   'admin.stackUpgrade.state.paused.fail.body': "Upgrade could not be paused. Try again later.",
+  'admin.stackDowngrade.state.paused.fail.header': "Pause Downgrade failed",
+  'admin.stackDowngrade.state.paused.fail.body': "Downgrade could not be paused. Try again later.",
 
   'admin.stackVersions.version.upgrade.upgradeOptions.header': "Upgrade Options",
   'admin.stackVersions.version.upgrade.upgradeOptions.bodyMsg.version': "You are about to perform an upgrade to <b>{0}</b>.",

http://git-wip-us.apache.org/repos/asf/ambari/blob/5e09053d/ambari-web/app/templates/main/admin/stack_upgrade/stack_upgrade_wizard.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/stack_upgrade/stack_upgrade_wizard.hbs b/ambari-web/app/templates/main/admin/stack_upgrade/stack_upgrade_wizard.hbs
index a13e2c2..02022cf 100644
--- a/ambari-web/app/templates/main/admin/stack_upgrade/stack_upgrade_wizard.hbs
+++ b/ambari-web/app/templates/main/admin/stack_upgrade/stack_upgrade_wizard.hbs
@@ -81,7 +81,7 @@
                     <button class="btn btn-danger" {{bindAttr disabled="controller.requestInProgress"}} {{action confirmDowngrade view.failedItem target="controller"}}>{{t common.downgrade}}</button>
                   {{/if}}
                   {{#if isDowngrade}}
-                    <button class="btn" {{action closeWizard target="view.parentView"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
+                    <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
                   {{else}}
                     <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseUpgrade}}</button>
                   {{/if}}
@@ -108,7 +108,7 @@
                 <button class="btn btn-danger" {{bindAttr disabled="controller.requestInProgress"}} {{action confirmDowngrade view.manualItem target="controller"}}>{{t common.downgrade}}</button>
               {{/if}}
               {{#if isDowngrade}}
-                <button class="btn" {{action closeWizard target="view.parentView"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
+                <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
               {{else}}
                 <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseUpgrade}}</button>
               {{/if}}
@@ -189,7 +189,7 @@
                 <button class="btn btn-danger" {{bindAttr disabled="controller.requestInProgress"}} {{action confirmDowngrade view.manualItem target="controller"}}>{{t common.downgrade}}</button>
               {{/if}}
               {{#if isDowngrade}}
-                <button class="btn" {{action closeWizard target="view.parentView"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
+                <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
               {{else}}
                 <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseUpgrade}}</button>
               {{/if}}
@@ -252,7 +252,7 @@
                   <button class="btn btn-danger" {{bindAttr disabled="controller.requestInProgress"}} {{action confirmDowngrade view.manualItem target="controller"}}>{{t common.downgrade}}</button>
               {{/if}}
               {{#if isDowngrade}}
-                  <button class="btn" {{action closeWizard target="view.parentView"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
+                  <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
               {{else}}
                   <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseUpgrade}}</button>
               {{/if}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/5e09053d/ambari-web/app/utils/ajax/ajax.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/ajax/ajax.js b/ambari-web/app/utils/ajax/ajax.js
index df3782f..a301148 100644
--- a/ambari-web/app/utils/ajax/ajax.js
+++ b/ambari-web/app/utils/ajax/ajax.js
@@ -1666,6 +1666,9 @@ var urls = {
     'format': function (data) {
       return {
         data: JSON.stringify({
+          "RequestInfo": {
+            "downgrade": data.isDowngrade
+          },
           "Upgrade": {
             "request_status": "ABORTED"
           }


[11/39] ambari git commit: AMBARI-14828. Config History table does not update data after clear filters (onechiporenko)

Posted by nc...@apache.org.
AMBARI-14828. Config History table does not update data after clear filters (onechiporenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d057581b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d057581b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d057581b

Branch: refs/heads/branch-dev-patch-upgrade
Commit: d057581b5b6930c988fe805b799258a9fb5a9744
Parents: 7371c1b
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Thu Jan 28 15:05:11 2016 +0200
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Thu Jan 28 15:05:11 2016 +0200

----------------------------------------------------------------------
 ambari-web/app/views/common/table_view.js         | 18 ++++++++++++++++--
 .../views/main/dashboard/config_history_view.js   | 12 ++++++++++++
 2 files changed, 28 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d057581b/ambari-web/app/views/common/table_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/table_view.js b/ambari-web/app/views/common/table_view.js
index 2ac9649..386a2aa 100644
--- a/ambari-web/app/views/common/table_view.js
+++ b/ambari-web/app/views/common/table_view.js
@@ -359,9 +359,23 @@ App.TableView = Em.View.extend(App.UserPref, {
       };
       this.get('filterConditions').push(filterCondition);
     }
+
+    this.saveAllFilterConditions();
+  },
+
+  /**
+   * Save not empty <code>filterConditions</code> to the localStorage
+   *
+   * @method saveAllFilterConditions
+   */
+  saveAllFilterConditions: function () {
+    var filterConditions = this.get('filterConditions');
     // remove empty entries
-    this.set('filterConditions', this.get('filterConditions').filter(function(item){ return !Em.isEmpty(item.value); }));
-    App.db.setFilterConditions(this.get('controller.name'), this.get('filterConditions'));
+    filterConditions = filterConditions.filter(function(item) {
+      return !Em.isEmpty(item.value);
+    });
+    this.set('filterConditions', filterConditions);
+    App.db.setFilterConditions(this.get('controller.name'), filterConditions);
   },
 
   saveDisplayLength: function() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/d057581b/ambari-web/app/views/main/dashboard/config_history_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/config_history_view.js b/ambari-web/app/views/main/dashboard/config_history_view.js
index 88f9025..3bfb22b 100644
--- a/ambari-web/app/views/main/dashboard/config_history_view.js
+++ b/ambari-web/app/views/main/dashboard/config_history_view.js
@@ -208,6 +208,18 @@ App.MainConfigHistoryView = App.TableView.extend(App.TableServerViewMixin, {
   },
 
   /**
+   * Clear all filter values, update filter conditions in the localStorage and update table data with API-request
+   *
+   * @method clearFilters
+   * @override
+   */
+  clearFilters: function () {
+    this._super();
+    this.saveAllFilterConditions();
+    this.refresh();
+  },
+
+  /**
    * callback executed after refresh call done
    * @method refreshDone
    */
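
The fix factors the "drop empty filter values and persist to localStorage" step into saveAllFilterConditions() so that clearFilters() on the Config History table can reuse it and then re-query the server. The pruning rule itself is small; a sketch of the same idea outside of Ember (illustrative only, the field names follow the filterConditions objects above):

# Illustrative: keep only filter conditions whose value is non-empty before
# persisting them, as saveAllFilterConditions() does in table_view.js.
def prune_filter_conditions(conditions):
    def is_empty(value):
        return value is None or value == "" or value == []
    return [c for c in conditions if not is_empty(c.get("value"))]

print(prune_filter_conditions([
    {"iColumn": 1, "value": "restart", "type": "string"},
    {"iColumn": 2, "value": "", "type": "string"},   # dropped
]))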


[25/39] ambari git commit: AMBARI-14839 - DEA is not enabled due to configuration conflict and history log directory can not be changed (Jeff Zhang via jonathanhurley)

Posted by nc...@apache.org.
AMBARI-14839 - DEA is not enabled due to configuration conflict and history log directory can not be changed (Jeff Zhang via jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6bac8ec1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6bac8ec1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6bac8ec1

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 6bac8ec15e774349d454aa05ec38e23d762b3ce8
Parents: 516912b
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Fri Jan 29 11:27:35 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Fri Jan 29 11:34:41 2016 -0500

----------------------------------------------------------------------
 .../SPARK/1.2.0.2.2/package/scripts/params.py   |  2 +-
 .../SPARK/configuration/spark-defaults.xml      |  4 +--
 .../configuration/spark-thrift-sparkconf.xml    | 31 ++++++++++++++++----
 .../stacks/HDP/2.4/upgrades/config-upgrade.xml  | 15 ----------
 4 files changed, 29 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6bac8ec1/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
index 935c47e..68c4f37 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
@@ -86,7 +86,7 @@ hive_user = status_params.hive_user
 spark_group = status_params.spark_group
 user_group = status_params.user_group
 spark_hdfs_user_dir = format("/user/{spark_user}")
-spark_history_dir = 'hdfs:///spark-history'
+spark_history_dir = default('/configurations/spark-defaults/spark.history.fs.logDirectory', "hdfs:///spark-history")
 
 spark_history_server_pid_file = status_params.spark_history_server_pid_file
 spark_thrift_server_pid_file = status_params.spark_thrift_server_pid_file

http://git-wip-us.apache.org/repos/asf/ambari/blob/6bac8ec1/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
index d8af790..1a6552f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
@@ -31,7 +31,7 @@
     </property>
     <property>
         <name>spark.history.fs.logDirectory</name>
-        <value>{{spark_history_dir}}</value>
+        <value>hdfs:///spark-history</value>
         <description>
             Base directory for history spark application log.
         </description>
@@ -45,7 +45,7 @@
     </property>
     <property>
         <name>spark.eventLog.dir</name>
-        <value>{{spark_history_dir}}</value>
+        <value>hdfs:///spark-history</value>
         <description>
             Base directory in which Spark events are logged, if spark.eventLog.enabled is true.
         </description>

http://git-wip-us.apache.org/repos/asf/ambari/blob/6bac8ec1/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
index 3b13496..2dbfe51 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
@@ -75,14 +75,17 @@
   <property>
     <name>spark.history.fs.logDirectory</name>
     <value>{{spark_history_dir}}</value>
+    <final>true</final>
     <description>
-      Base directory for history spark application log.
+      Base directory for history spark application log. It is the same value
+      as in spark-defaults.xml.
     </description>
   </property>
 
   <property>
     <name>spark.eventLog.enabled</name>
     <value>true</value>
+    <final>true</final>
     <description>
       Whether to log Spark events, useful for reconstructing the Web UI after the application has finished.
     </description>
@@ -91,8 +94,10 @@
   <property>
     <name>spark.eventLog.dir</name>
     <value>{{spark_history_dir}}</value>
+    <final>true</final>
     <description>
-      Base directory in which Spark events are logged, if spark.eventLog.enabled is true.
+      Base directory in which Spark events are logged, if spark.eventLog.enabled is true. It is the same value
+      as in spark-defaults.xml.
     </description>
   </property>
 
@@ -138,10 +143,26 @@
   </property>
 
   <property>
-    <name>spark.executor.instances</name>
-    <value>2</value>
+    <name>spark.dynamicAllocation.initialExecutors</name>
+    <value>0</value>
     <description>
-      The number of executor.
+      Initial number of executors to run if dynamic allocation is enabled.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.dynamicAllocation.maxExecutors</name>
+    <value>10</value>
+    <description>
+      Upper bound for the number of executors if dynamic allocation is enabled.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.dynamicAllocation.minExecutors</name>
+    <value>0</value>
+    <description>
+      Lower bound for the number of executors if dynamic allocation is enabled.
     </description>
   </property>
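Taken together, the three properties above bound dynamic executor allocation for the thrift server at 0 initial, 0 minimum and 10 maximum executors. An illustrative Python snippet that renders those settings in a "key value" conf layout, roughly what the rendered spark-thrift-sparkconf file looks like on disk (the output file name is chosen for the example only):

dynamic_allocation = {
    'spark.dynamicAllocation.initialExecutors': '0',
    'spark.dynamicAllocation.minExecutors': '0',
    'spark.dynamicAllocation.maxExecutors': '10',
}

# One "key value" pair per line.
with open('spark-thrift-sparkconf.example', 'w') as conf_file:
    for key, value in sorted(dynamic_allocation.items()):
        conf_file.write('%s %s\n' % (key, value))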
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6bac8ec1/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
index 5949486..ee06577 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
@@ -172,9 +172,6 @@
             <type>spark-defaults</type>
             <transfer operation="delete" delete-key="spark.yarn.services" />
             <set key="spark.history.provider" value="org.apache.spark.deploy.history.FsHistoryProvider"/>
-            <set key="spark.history.fs.logDirectory" value="{{spark_history_dir}}"/>
-            <set key="spark.eventLog.enabled" value="true"/>
-            <set key="spark.eventLog.dir" value="{{spark_history_dir}}"/>
           </definition>
         </changes>
       </component>
@@ -190,18 +187,6 @@
             <transfer operation="delete" delete-key="spark.yarn.submit.file.replication" />
             <transfer operation="delete" delete-key="spark.yarn.preserve.staging.files" />
             <transfer operation="delete" delete-key="spark.yarn.max.executor.failures" />
-            <set key="spark.history.provider" value="org.apache.spark.deploy.history.FsHistoryProvider"/>
-            <set key="spark.history.fs.logDirectory" value="{{spark_history_dir}}"/>
-            <set key="spark.eventLog.enabled" value="true"/>
-            <set key="spark.eventLog.dir" value="{{spark_history_dir}}"/>
-            <set key="spark.master" value="{{spark_thrift_master}}"/>
-            <set key="spark.scheduler.allocation.file" value="{{spark_conf}}/spark-thrift-fairscheduler.xml"/>
-            <set key="spark.scheduler.mode" value="FAIR"/>
-            <set key="spark.shuffle.service.enabled" value="true"/>
-            <set key="spark.dynamicAllocation.enabled" value="true"/>
-            <set key="spark.executor.instances" value="2"/>
-            <set key="spark.yarn.am.memory" value="512m"/>
-            <set key="spark.executor.memory" value="1g"/>
           </definition>
         </changes>
       </component>
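The <transfer operation="delete"> and <set> directives above amount to removing and adding keys in the named config type during upgrade. A rough Python sketch of the net effect on spark-defaults (the pre-upgrade value shown is a placeholder, not the real one):

spark_defaults = {
    'spark.yarn.services': 'placeholder-old-value',  # hypothetical pre-upgrade entry
}

spark_defaults.pop('spark.yarn.services', None)       # <transfer operation="delete">
spark_defaults['spark.history.provider'] = (          # <set key=... value=...>
    'org.apache.spark.deploy.history.FsHistoryProvider')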


[32/39] ambari git commit: AMBARI-14857 Ambari UI does not accept dfs.data.dir entries with file protocol when enabling 'storage type' feature. (ababiichuk)

Posted by nc...@apache.org.
AMBARI-14857 Ambari UI does not accept dfs.data.dir entries with file protocol when enabling 'storage type' feature. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e15ed442
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e15ed442
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e15ed442

Branch: refs/heads/branch-dev-patch-upgrade
Commit: e15ed4429d5bf8e7c2861342c9b890665fbd1ab7
Parents: 7444981
Author: ababiichuk <ab...@hortonworks.com>
Authored: Mon Feb 1 11:02:54 2016 +0200
Committer: ababiichuk <ab...@hortonworks.com>
Committed: Mon Feb 1 11:02:54 2016 +0200

----------------------------------------------------------------------
 ambari-web/app/utils/validator.js       | 2 +-
 ambari-web/test/utils/validator_test.js | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e15ed442/ambari-web/app/utils/validator.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/validator.js b/ambari-web/app/utils/validator.js
index 82c9f47..6426163 100644
--- a/ambari-web/app/utils/validator.js
+++ b/ambari-web/app/utils/validator.js
@@ -76,7 +76,7 @@ module.exports = {
    * @returns {boolean}
    */
   isValidDataNodeDir: function(value) {
-    var dirRegex = /^(\[[0-9a-zA-Z]+\])?(\/[0-9a-z]*)/;
+    var dirRegex = /^(\[[0-9a-zA-Z]+\])?(file:\/\/)?(\/[0-9a-z]*)/;
     var winRegex = /^(\[[0-9a-zA-Z]+\])?[a-zA-Z]:\\[0-9a-zA-Z]*/;
     var winUrlRegex = /^(\[[0-9a-zA-Z]+\])?file:\/\/\/[a-zA-Z]:\/[0-9a-zA-Z]*/;
     var dirs = value.split(',');

http://git-wip-us.apache.org/repos/asf/ambari/blob/e15ed442/ambari-web/test/utils/validator_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/utils/validator_test.js b/ambari-web/test/utils/validator_test.js
index 5ff6185..ef90561 100644
--- a/ambari-web/test/utils/validator_test.js
+++ b/ambari-web/test/utils/validator_test.js
@@ -327,6 +327,7 @@ describe('validator', function () {
       {m:'"/1a2b3c" - valid',i:'/1a2b3c',e:true},
       {m:'"[ssd]/1a2b3c" - valid',i:'[ssd]/1a2b3c',e:true},
       {m:'"[DISK]/1a2b3c" - valid',i:'[DISK]/1a2b3c',e:true},
+      {m:'"[DISK]file:///1a2b3c" - valid',i:'[DISK]file:///1a2b3c',e:true},
       {m:'"[] /1a2b3c" - invalid',i:'[] /1a2b3c',e:false},
       {m:'"[ssd] /1a2b3c" - invalid',i:'[ssd] /1a2b3c',e:false},
       {m:'"[/1a2b3c]" - invalid',i:'[/1a2b3c]',e:false},


[24/39] ambari git commit: Revert "AMBARI-14839 - DEA is not enabled due to configuration conflict and history log directory can not be changed (Jeff Zhang via jonathanhurley)"

Posted by nc...@apache.org.
Revert "AMBARI-14839 - DEA is not enabled due to configuration conflict and history log directory can not be changed (Jeff Zhang via jonathanhurley)"

This reverts commit 981ede5455c02ae1bd4aac7495c2c667bc889dbc.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/516912bb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/516912bb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/516912bb

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 516912bb2fec21688a554401feca18f61ca84c66
Parents: 6d83195
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Fri Jan 29 11:26:44 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Fri Jan 29 11:26:44 2016 -0500

----------------------------------------------------------------------
 .../SPARK/1.2.0.2.2/package/scripts/params.py   |  4 +--
 .../SPARK/configuration/spark-defaults.xml      |  4 +--
 .../configuration/spark-thrift-sparkconf.xml    | 31 ++++----------------
 3 files changed, 8 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/516912bb/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
index abf9191..935c47e 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
@@ -86,9 +86,7 @@ hive_user = status_params.hive_user
 spark_group = status_params.spark_group
 user_group = status_params.user_group
 spark_hdfs_user_dir = format("/user/{spark_user}")
-spark_history_dir = "hdfs:///spark-history"
-if 'spark-defaults' in config['configurations'] and 'spark.history.fs.logDirectory' in config['configurations']['spark-defaults']:
-  spark_history_dir = config['configurations']['spark-defaults']['spark.history.fs.logDirectory']
+spark_history_dir = 'hdfs:///spark-history'
 
 spark_history_server_pid_file = status_params.spark_history_server_pid_file
 spark_thrift_server_pid_file = status_params.spark_thrift_server_pid_file

http://git-wip-us.apache.org/repos/asf/ambari/blob/516912bb/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
index 1a6552f..d8af790 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
@@ -31,7 +31,7 @@
     </property>
     <property>
         <name>spark.history.fs.logDirectory</name>
-        <value>hdfs:///spark-history</value>
+        <value>{{spark_history_dir}}</value>
         <description>
             Base directory for history spark application log.
         </description>
@@ -45,7 +45,7 @@
     </property>
     <property>
         <name>spark.eventLog.dir</name>
-        <value>hdfs:///spark-history</value>
+        <value>{{spark_history_dir}}</value>
         <description>
             Base directory in which Spark events are logged, if spark.eventLog.enabled is true.
         </description>

http://git-wip-us.apache.org/repos/asf/ambari/blob/516912bb/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
index 2dbfe51..3b13496 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
@@ -75,17 +75,14 @@
   <property>
     <name>spark.history.fs.logDirectory</name>
     <value>{{spark_history_dir}}</value>
-    <final>true</final>
     <description>
-      Base directory for history spark application log. It is the same value
-      as in spark-defaults.xml.
+      Base directory for history spark application log.
     </description>
   </property>
 
   <property>
     <name>spark.eventLog.enabled</name>
     <value>true</value>
-    <final>true</final>
     <description>
       Whether to log Spark events, useful for reconstructing the Web UI after the application has finished.
     </description>
@@ -94,10 +91,8 @@
   <property>
     <name>spark.eventLog.dir</name>
     <value>{{spark_history_dir}}</value>
-    <final>true</final>
     <description>
-      Base directory in which Spark events are logged, if spark.eventLog.enabled is true. It is the same value
-      as in spark-defaults.xml.
+      Base directory in which Spark events are logged, if spark.eventLog.enabled is true.
     </description>
   </property>
 
@@ -143,26 +138,10 @@
   </property>
 
   <property>
-    <name>spark.dynamicAllocation.initialExecutors</name>
-    <value>0</value>
+    <name>spark.executor.instances</name>
+    <value>2</value>
     <description>
-      Initial number of executors to run if dynamic allocation is enabled.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.dynamicAllocation.maxExecutors</name>
-    <value>10</value>
-    <description>
-      Upper bound for the number of executors if dynamic allocation is enabled.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.dynamicAllocation.minExecutors</name>
-    <value>0</value>
-    <description>
-      Lower bound for the number of executors if dynamic allocation is enabled.
+      The number of executor.
     </description>
   </property>
 


[06/39] ambari git commit: AMBARI-14806. Provide Metrics discovery API for AMS. (swagle)

Posted by nc...@apache.org.
AMBARI-14806. Provide Metrics discovery API for AMS. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/646fb429
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/646fb429
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/646fb429

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 646fb429f53adaa687dd9d4e7acdff617203caf6
Parents: 73b5399
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Wed Jan 27 15:51:26 2016 -0800
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Wed Jan 27 15:51:26 2016 -0800

----------------------------------------------------------------------
 .../sink/timeline/MetadataException.java        |  28 ++
 .../sink/timeline/TimelineMetricMetadata.java   | 163 ++++++++++++
 .../timeline/HBaseTimelineMetricStore.java      |  73 ++++--
 .../metrics/timeline/PhoenixHBaseAccessor.java  | 253 ++++++++++++++++++-
 .../timeline/TimelineMetricConfiguration.java   |   9 +
 .../metrics/timeline/TimelineMetricStore.java   |  22 +-
 .../TimelineMetricAggregatorFactory.java        |   5 +-
 .../TimelineMetricAppAggregator.java            |  28 +-
 .../TimelineMetricClusterAggregatorSecond.java  |   5 +-
 .../discovery/TimelineMetricMetadataKey.java    |  56 ++++
 .../TimelineMetricMetadataManager.java          | 187 ++++++++++++++
 .../discovery/TimelineMetricMetadataSync.java   | 105 ++++++++
 .../timeline/query/PhoenixTransactSQL.java      |  37 ++-
 .../webapp/TimelineWebServices.java             |  59 ++++-
 .../TestApplicationHistoryServer.java           |  11 +-
 .../timeline/AbstractMiniHBaseClusterTest.java  |   8 +-
 .../timeline/ITPhoenixHBaseAccessor.java        |   9 +-
 .../timeline/TestTimelineMetricStore.java       |  14 +
 .../aggregators/ITClusterAggregator.java        |  16 +-
 .../timeline/discovery/TestMetadataManager.java | 112 ++++++++
 20 files changed, 1123 insertions(+), 77 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/646fb429/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetadataException.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetadataException.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetadataException.java
new file mode 100644
index 0000000..01230af
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetadataException.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+/**
+ * Marker for checked Exceptions thrown from Metadata management layer.
+ */
+public class MetadataException extends Exception {
+  // Default constructor
+  public MetadataException(String message) {
+    super(message);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/646fb429/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetricMetadata.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetricMetadata.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetricMetadata.java
new file mode 100644
index 0000000..0624f9c
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetricMetadata.java
@@ -0,0 +1,163 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.codehaus.jackson.annotate.JsonIgnore;
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+@XmlRootElement(name = "metric_metadata")
+@XmlAccessorType(XmlAccessType.NONE)
+@InterfaceAudience.Public
+@InterfaceStability.Unstable
+public class TimelineMetricMetadata {
+  private String metricName;
+  private String appId;
+  private String units;
+  private MetricType type = MetricType.UNDEFINED;
+  private Long seriesStartTime;
+  boolean supportsAggregates = true;
+  // Serialization ignored helper flag
+  boolean isPersisted = false;
+
+  public enum MetricType {
+    GAUGE, // Can vary in both directions
+    COUNTER, // Single dimension
+    UNDEFINED // Default
+  }
+
+  // Default constructor
+  public TimelineMetricMetadata() {
+  }
+
+  public TimelineMetricMetadata(String metricName, String appId, String units,
+                                MetricType type, Long seriesStartTime,
+                                boolean supportsAggregates) {
+    this.metricName = metricName;
+    this.appId = appId;
+    this.units = units;
+    this.type = type;
+    this.seriesStartTime = seriesStartTime;
+    this.supportsAggregates = supportsAggregates;
+  }
+
+  @XmlElement(name = "metricname")
+  public String getMetricName() {
+    return metricName;
+  }
+
+  public void setMetricName(String metricName) {
+    this.metricName = metricName;
+  }
+
+  // This is the key for the webservice hence ignored.
+  //@XmlElement(name = "appid")
+  public String getAppId() {
+    return appId;
+  }
+
+  public void setAppId(String appId) {
+    this.appId = appId;
+  }
+
+  @XmlElement(name = "units")
+  public String getUnits() {
+    return units;
+  }
+
+  public void setUnits(String units) {
+    this.units = units;
+  }
+
+  @XmlElement(name = "type")
+  public MetricType getType() {
+    return type;
+  }
+
+  public void setType(MetricType type) {
+    this.type = type;
+  }
+
+  @XmlElement(name = "seriesStartTime")
+  public Long getSeriesStartTime() {
+    return seriesStartTime;
+  }
+
+  public void setSeriesStartTime(Long seriesStartTime) {
+    this.seriesStartTime = seriesStartTime;
+  }
+
+  @XmlElement(name = "supportsAggregation")
+  public boolean isSupportsAggregates() {
+    return supportsAggregates;
+  }
+
+  public void setSupportsAggregates(boolean supportsAggregates) {
+    this.supportsAggregates = supportsAggregates;
+  }
+
+  @JsonIgnore
+  public boolean isPersisted() {
+    return isPersisted;
+  }
+
+  public void setIsPersisted(boolean isPersisted) {
+    this.isPersisted = isPersisted;
+  }
+
+  /**
+   * Assumes the key of the object being compared is the same as @TimelineMetricMetadata
+   * @param metadata @TimelineMetricMetadata to be compared
+   */
+  public boolean needsToBeSynced(TimelineMetricMetadata metadata) throws MetadataException {
+    if (!this.metricName.equals(metadata.getMetricName()) ||
+        !this.appId.equals(metadata.getAppId())) {
+      throw new MetadataException("Unexpected argument: metricName = " +
+        metadata.getMetricName() + ", appId = " + metadata.getAppId());
+    }
+
+    // Series start time should never change
+    return (this.units != null && !this.units.equals(metadata.getUnits())) ||
+      (this.type != null && !this.type.equals(metadata.getType())) ||
+      //!this.lastRecordedTime.equals(metadata.getLastRecordedTime()) || // TODO: support
+      !this.supportsAggregates == metadata.isSupportsAggregates();
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+
+    TimelineMetricMetadata that = (TimelineMetricMetadata) o;
+
+    if (!metricName.equals(that.metricName)) return false;
+    return !(appId != null ? !appId.equals(that.appId) : that.appId != null);
+
+  }
+
+  @Override
+  public int hashCode() {
+    int result = metricName.hashCode();
+    result = 31 * result + (appId != null ? appId.hashCode() : 0);
+    return result;
+  }
+}
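A compact sketch of the sync decision needsToBeSynced() implements above: metadata sharing the same (metricName, appId) key is re-persisted only when units, type or the supportsAggregates flag differ from the cached copy (Python used here purely for illustration):

def needs_to_be_synced(cached, incoming):
    # Same key is a precondition, mirroring the MetadataException guard.
    if (cached['metricName'], cached['appId']) != (incoming['metricName'], incoming['appId']):
        raise ValueError('Unexpected argument: metadata key mismatch')
    return any(cached[field] != incoming[field]
               for field in ('units', 'type', 'supportsAggregates'))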

http://git-wip-us.apache.org/repos/asf/ambari/blob/646fb429/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
index c4e946a..c30a354 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
@@ -23,12 +23,15 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.metrics2.sink.timeline.Precision;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.Function;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricAggregator;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricAggregatorFactory;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataKey;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.Condition;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.DefaultCondition;
 
@@ -39,15 +42,10 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.TreeMap;
-import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Executors;
-import java.util.concurrent.RejectedExecutionHandler;
 import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.ScheduledFuture;
-import java.util.concurrent.ScheduledThreadPoolExecutor;
-import java.util.concurrent.ThreadFactory;
-import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.USE_GROUPBY_AGGREGATOR_QUERIES;
@@ -58,8 +56,8 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin
   private final TimelineMetricConfiguration configuration;
   private PhoenixHBaseAccessor hBaseAccessor;
   private static volatile boolean isInitialized = false;
-  private final ScheduledExecutorService executorService =
-    Executors.newSingleThreadScheduledExecutor();
+  private final ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor();
+  private TimelineMetricMetadataManager metricMetadataManager;
 
   /**
    * Construct the service.
@@ -81,6 +79,9 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin
     if (!isInitialized) {
       hBaseAccessor = new PhoenixHBaseAccessor(hbaseConf, metricsConf);
       hBaseAccessor.initMetricSchema();
+      // Initialize metadata from store
+      metricMetadataManager = new TimelineMetricMetadataManager(hBaseAccessor, metricsConf);
+      metricMetadataManager.initializeMetadata();
 
       if (Boolean.parseBoolean(metricsConf.get(USE_GROUPBY_AGGREGATOR_QUERIES, "true"))) {
         LOG.info("Using group by aggregators for aggregating host and cluster metrics.");
@@ -88,7 +89,7 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin
 
       // Start the cluster aggregator second
       TimelineMetricAggregator secondClusterAggregator =
-        TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hBaseAccessor, metricsConf);
+        TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hBaseAccessor, metricsConf, metricMetadataManager);
       if (!secondClusterAggregator.isDisabled()) {
         Thread aggregatorThread = new Thread(secondClusterAggregator);
         aggregatorThread.start();
@@ -188,8 +189,7 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin
     TimelineMetrics metrics;
 
     if (hostnames == null || hostnames.isEmpty()) {
-      metrics = hBaseAccessor.getAggregateMetricRecords(condition,
-          metricFunctions);
+      metrics = hBaseAccessor.getAggregateMetricRecords(condition, metricFunctions);
     } else {
       metrics = hBaseAccessor.getMetricRecords(condition, metricFunctions);
     }
@@ -199,7 +199,7 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin
   private TimelineMetrics postProcessMetrics(TimelineMetrics metrics) {
     List<TimelineMetric> metricsList = metrics.getMetrics();
 
-    for (TimelineMetric metric: metricsList){
+    for (TimelineMetric metric : metricsList){
       String name = metric.getMetricName();
       if (name.contains("._rate")){
         updateValueAsRate(metric.getMetricValues());
@@ -250,22 +250,17 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin
         // fallback to VALUE, and fullMetricName
       }
 
-      addFunctionToMetricName(metricsFunctions, cleanMetricName, function);
+      List<Function> functionsList = metricsFunctions.get(cleanMetricName);
+      if (functionsList == null) {
+        functionsList = new ArrayList<Function>(1);
+      }
+      functionsList.add(function);
+      metricsFunctions.put(cleanMetricName, functionsList);
     }
 
     return metricsFunctions;
   }
 
-  private static void addFunctionToMetricName(
-    HashMap<String, List<Function>> metricsFunctions, String cleanMetricName,
-    Function function) {
-
-    List<Function> functionsList = metricsFunctions.get(cleanMetricName);
-    if (functionsList==null) functionsList = new ArrayList<Function>(1);
-    functionsList.add(function);
-    metricsFunctions.put(cleanMetricName, functionsList);
-  }
-
   @Override
   public TimelineMetric getTimelineMetric(String metricName, List<String> hostnames,
       String applicationId, String instanceId, Long startTime,
@@ -314,16 +309,38 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin
     return metric;
   }
 
-
   @Override
-  public TimelinePutResponse putMetrics(TimelineMetrics metrics)
-    throws SQLException, IOException {
-
+  public TimelinePutResponse putMetrics(TimelineMetrics metrics) throws SQLException, IOException {
     // Error indicated by the Sql exception
     TimelinePutResponse response = new TimelinePutResponse();
 
-    hBaseAccessor.insertMetricRecords(metrics);
+    hBaseAccessor.insertMetricRecordsWithMetadata(metricMetadataManager, metrics);
 
     return response;
   }
+
+  @Override
+  public Map<String, List<TimelineMetricMetadata>> getTimelineMetricMetadata() throws SQLException, IOException {
+    Map<TimelineMetricMetadataKey, TimelineMetricMetadata> metadata =
+      metricMetadataManager.getMetadataCache();
+
+    // Group Metadata by AppId
+    Map<String, List<TimelineMetricMetadata>> metadataByAppId = new HashMap<>();
+    for (TimelineMetricMetadata metricMetadata : metadata.values()) {
+      List<TimelineMetricMetadata> metadataList = metadataByAppId.get(metricMetadata.getAppId());
+      if (metadataList == null) {
+        metadataList = new ArrayList<>();
+        metadataByAppId.put(metricMetadata.getAppId(), metadataList);
+      }
+
+      metadataList.add(metricMetadata);
+    }
+
+    return metadataByAppId;
+  }
+
+  @Override
+  public Map<String, Set<String>> getHostAppsMetadata() throws SQLException, IOException {
+    return metricMetadataManager.getHostedAppsCache();
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/646fb429/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
index 8325fb1..980c4af 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
 
+import com.google.common.base.Enums;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -27,6 +28,7 @@ import org.apache.hadoop.hbase.util.RetryCounterFactory;
 import org.apache.hadoop.metrics2.sink.timeline.Precision;
 import org.apache.hadoop.metrics2.sink.timeline.SingleValuedTimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.AggregatorUtils;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.Function;
@@ -34,6 +36,8 @@ import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.MetricHostAggregate;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineClusterMetric;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricReadHelper;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataKey;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.Condition;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.ConnectionProvider;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.DefaultPhoenixDataSource;
@@ -51,12 +55,18 @@ import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.TreeMap;
 import java.util.concurrent.TimeUnit;
 
 import static java.util.concurrent.TimeUnit.SECONDS;
+import static org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata.*;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.AGGREGATE_TABLE_SPLIT_POINTS;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.AGGREGATORS_SKIP_BLOCK_CACHE;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.CLUSTER_DAILY_TABLE_TTL;
@@ -75,12 +85,16 @@ import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.ti
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.PRECISION_TABLE_SPLIT_POINTS;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.PRECISION_TABLE_TTL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.ALTER_SQL;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_HOSTED_APPS_METADATA_TABLE_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_METRICS_AGGREGATE_TABLE_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_METRICS_CLUSTER_AGGREGATE_GROUPED_TABLE_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_METRICS_CLUSTER_AGGREGATE_TABLE_SQL;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_METRICS_METADATA_TABLE_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_METRICS_TABLE_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.DEFAULT_ENCODING;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.DEFAULT_TABLE_COMPRESSION;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.GET_HOSTED_APPS_METADATA_SQL;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.GET_METRIC_METADATA_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.METRICS_AGGREGATE_DAILY_TABLE_NAME;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.METRICS_AGGREGATE_HOURLY_TABLE_NAME;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.METRICS_AGGREGATE_MINUTE_TABLE_NAME;
@@ -92,6 +106,8 @@ import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.ti
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_AGGREGATE_RECORD_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_CLUSTER_AGGREGATE_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_CLUSTER_AGGREGATE_TIME_SQL;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_HOSTED_APPS_METADATA_SQL;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_METADATA_SQL;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_METRICS_SQL;
 
 /**
@@ -260,6 +276,14 @@ public class PhoenixHBaseAccessor {
       conn = getConnectionRetryingOnException();
       stmt = conn.createStatement();
 
+      // Metadata
+      String metadataSql = String.format(CREATE_METRICS_METADATA_TABLE_SQL,
+        encoding, compression);
+      stmt.executeUpdate(metadataSql);
+      String hostedAppSql = String.format(CREATE_HOSTED_APPS_METADATA_TABLE_SQL,
+        encoding, compression);
+      stmt.executeUpdate(hostedAppSql);
+
       // Host level
       String precisionSql = String.format(CREATE_METRICS_TABLE_SQL,
         encoding, precisionTtl, compression);
@@ -371,8 +395,8 @@ public class PhoenixHBaseAccessor {
     return "";
   }
 
-  public void insertMetricRecords(TimelineMetrics metrics) throws SQLException, IOException {
-
+  public void insertMetricRecordsWithMetadata(TimelineMetricMetadataManager metadataManager,
+                                              TimelineMetrics metrics) throws SQLException, IOException {
     List<TimelineMetric> timelineMetrics = metrics.getMetrics();
     if (timelineMetrics == null || timelineMetrics.isEmpty()) {
       LOG.debug("Empty metrics insert request.");
@@ -422,8 +446,16 @@ public class PhoenixHBaseAccessor {
 
         try {
           metricRecordStmt.executeUpdate();
+
+          // Write to metadata cache on successful write to store
+          metadataManager.putIfModifiedTimelineMetricMetadata(
+            metadataManager.getTimelineMetricMetadata(metric));
+
+          metadataManager.putIfModifiedHostedAppsMetadata(
+            metric.getHostName(), metric.getAppId());
+
         } catch (SQLException sql) {
-          LOG.error(sql);
+          LOG.error("Failed on insert records to store.", sql);
         }
       }
 
@@ -448,6 +480,10 @@ public class PhoenixHBaseAccessor {
     }
   }
 
+  public void insertMetricRecords(TimelineMetrics metrics) throws SQLException, IOException {
+    insertMetricRecordsWithMetadata(null, metrics);
+  }
+
   @SuppressWarnings("unchecked")
   public TimelineMetrics getMetricRecords(
     final Condition condition, Map<String, List<Function>> metricFunctions)
@@ -566,8 +602,7 @@ public class PhoenixHBaseAccessor {
       }
     }
     else {
-      TimelineMetric metric;
-      metric = TIMELINE_METRIC_READ_HELPER.getTimelineMetricFromResultSet(rs);
+      TimelineMetric metric = TIMELINE_METRIC_READ_HELPER.getTimelineMetricFromResultSet(rs);
 
       if (condition.isGrouped()) {
         metrics.addOrMergeTimelineMetric(metric);
@@ -1032,4 +1067,212 @@ public class PhoenixHBaseAccessor {
   public boolean isSkipBlockCacheForAggregatorsEnabled() {
     return skipBlockCacheForAggregatorsEnabled;
   }
+
+  /**
+   * One time save of metadata when discovering topology during aggregation.
+   * @throws SQLException
+   */
+  public void saveHostAppsMetadata(Map<String, Set<String>> hostedApps) throws SQLException {
+    Connection conn = getConnection();
+    PreparedStatement stmt = null;
+    try {
+      stmt = conn.prepareStatement(UPSERT_HOSTED_APPS_METADATA_SQL);
+      int rowCount = 0;
+
+      for (Map.Entry<String, Set<String>> hostedAppsEntry : hostedApps.entrySet()) {
+        if (LOG.isTraceEnabled()) {
+          LOG.trace("HostedAppsMetadata: " + hostedAppsEntry);
+        }
+
+        stmt.clearParameters();
+        stmt.setString(1, hostedAppsEntry.getKey());
+        stmt.setString(2, StringUtils.join(hostedAppsEntry.getValue(), ","));
+        try {
+          stmt.executeUpdate();
+          rowCount++;
+        } catch (SQLException sql) {
+          LOG.error("Error saving hosted apps metadata.", sql);
+        }
+      }
+
+      conn.commit();
+      LOG.info("Saved " + rowCount + " hosted apps metadata records.");
+
+    } finally {
+      if (stmt != null) {
+        try {
+          stmt.close();
+        } catch (SQLException e) {
+          // Ignore
+        }
+      }
+      if (conn != null) {
+        try {
+          conn.close();
+        } catch (SQLException sql) {
+          // Ignore
+        }
+      }
+    }
+  }
+
+  /**
+   * Save metdata on updates.
+   * @param metricMetadata @Collection<@TimelineMetricMetadata>
+   * @throws SQLException
+   */
+  public void saveMetricMetadata(Collection<TimelineMetricMetadata> metricMetadata) throws SQLException {
+    if (metricMetadata.isEmpty()) {
+      LOG.info("No metadata records to save.");
+      return;
+    }
+
+    Connection conn = getConnection();
+    PreparedStatement stmt = null;
+
+    try {
+      stmt = conn.prepareStatement(UPSERT_METADATA_SQL);
+      int rowCount = 0;
+
+      for (TimelineMetricMetadata metadata : metricMetadata) {
+        if (LOG.isTraceEnabled()) {
+          LOG.trace("TimelineMetricMetadata: metricName = " + metadata.getMetricName()
+            + ", appId = " + metadata.getAppId()
+            + ", seriesStartTime = " + metadata.getSeriesStartTime()
+          );
+        }
+
+        stmt.clearParameters();
+        stmt.setString(1, metadata.getMetricName());
+        stmt.setString(2, metadata.getAppId());
+        stmt.setString(3, metadata.getUnits());
+        stmt.setString(4, metadata.getType().name());
+        stmt.setLong(5, metadata.getSeriesStartTime());
+        stmt.setBoolean(6, metadata.isSupportsAggregates());
+
+        try {
+          stmt.executeUpdate();
+          rowCount++;
+        } catch (SQLException sql) {
+          LOG.error("Error saving metadata.", sql);
+        }
+      }
+
+      conn.commit();
+      LOG.info("Saved " + rowCount + " metadata records.");
+
+    } finally {
+      if (stmt != null) {
+        try {
+          stmt.close();
+        } catch (SQLException e) {
+          // Ignore
+        }
+      }
+      if (conn != null) {
+        try {
+          conn.close();
+        } catch (SQLException sql) {
+          // Ignore
+        }
+      }
+    }
+  }
+
+  public Map<String, Set<String>> getHostedAppsMetadata() throws SQLException {
+    Map<String, Set<String>> hostedAppMap = new HashMap<>();
+    Connection conn = getConnection();
+    PreparedStatement stmt = null;
+    ResultSet rs = null;
+
+    try {
+      stmt = conn.prepareStatement(GET_HOSTED_APPS_METADATA_SQL);
+      rs = stmt.executeQuery();
+
+      while (rs.next()) {
+        hostedAppMap.put(rs.getString("HOSTNAME"),
+          new HashSet<>(Arrays.asList(StringUtils.split(rs.getString("APP_IDS"), ","))));
+      }
+
+    } finally {
+      if (rs != null) {
+        try {
+          rs.close();
+        } catch (SQLException e) {
+          // Ignore
+        }
+      }
+      if (stmt != null) {
+        try {
+          stmt.close();
+        } catch (SQLException e) {
+          // Ignore
+        }
+      }
+      if (conn != null) {
+        try {
+          conn.close();
+        } catch (SQLException sql) {
+          // Ignore
+        }
+      }
+    }
+
+    return hostedAppMap;
+  }
+
+  // No filter criteria support for now.
+  public Map<TimelineMetricMetadataKey, TimelineMetricMetadata> getTimelineMetricMetadata() throws SQLException {
+    Map<TimelineMetricMetadataKey, TimelineMetricMetadata> metadataMap = new HashMap<>();
+    Connection conn = getConnection();
+    PreparedStatement stmt = null;
+    ResultSet rs = null;
+
+    try {
+      stmt = conn.prepareStatement(GET_METRIC_METADATA_SQL);
+      rs = stmt.executeQuery();
+
+      while (rs.next()) {
+        String metricName = rs.getString("METRIC_NAME");
+        String appId = rs.getString("APP_ID");
+        TimelineMetricMetadata metadata = new TimelineMetricMetadata(
+          metricName,
+          appId,
+          rs.getString("UNITS"),
+          Enums.getIfPresent(MetricType.class, rs.getString("TYPE")).or(MetricType.UNDEFINED),
+          rs.getLong("START_TIME"),
+          rs.getBoolean("SUPPORTS_AGGREGATION")
+        );
+
+        TimelineMetricMetadataKey key = new TimelineMetricMetadataKey(metricName, appId);
+        metadata.setIsPersisted(true); // Always true on retrieval
+        metadataMap.put(key, metadata);
+      }
+
+    } finally {
+      if (rs != null) {
+        try {
+          rs.close();
+        } catch (SQLException e) {
+          // Ignore
+        }
+      }
+      if (stmt != null) {
+        try {
+          stmt.close();
+        } catch (SQLException e) {
+          // Ignore
+        }
+      }
+      if (conn != null) {
+        try {
+          conn.close();
+        } catch (SQLException sql) {
+          // Ignore
+        }
+      }
+    }
+
+    return metadataMap;
+  }
 }
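The hosted-apps persistence above is a simple round trip: the set of appIds per host is joined with ',' on save and split back into a set on read. In Python terms (host and app names are example values):

hosted_apps = {'c6401.ambari.apache.org': {'HOST', 'ams-hbase'}}

saved = {host: ','.join(sorted(apps)) for host, apps in hosted_apps.items()}
restored = {host: set(app_ids.split(',')) for host, app_ids in saved.items()}

assert restored == hosted_apps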

http://git-wip-us.apache.org/repos/asf/ambari/blob/646fb429/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
index ea48efe..46f61fb 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
@@ -202,6 +202,15 @@ public class TimelineMetricConfiguration {
   public static final String AGGREGATORS_SKIP_BLOCK_CACHE =
     "timeline.metrics.aggregators.skip.blockcache.enabled";
 
+  public static final String DISABLE_METRIC_METADATA_MGMT =
+    "timeline.metrics.service.metadata.management.disabled";
+
+  public static final String METRICS_METADATA_SYNC_INIT_DELAY =
+    "timeline.metrics.service.metadata.sync.init.delay";
+
+  public static final String METRICS_METADATA_SYNC_SCHEDULE_DELAY =
+    "timeline.metrics.service.metadata.sync.delay";
+
   public static final String HOST_APP_ID = "HOST";
 
   private Configuration hbaseConf;

http://git-wip-us.apache.org/repos/asf/ambari/blob/646fb429/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricStore.java
index e062ca0..0aa102e 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricStore.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricStore.java
@@ -19,11 +19,14 @@ package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline
 
 import org.apache.hadoop.metrics2.sink.timeline.Precision;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
 import java.io.IOException;
 import java.sql.SQLException;
 import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 public interface TimelineMetricStore {
   /**
@@ -67,6 +70,21 @@ public interface TimelineMetricStore {
    * @return An {@link org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse}.
    * @throws SQLException, IOException
    */
-  TimelinePutResponse putMetrics(TimelineMetrics metrics)
-    throws SQLException, IOException;
+  TimelinePutResponse putMetrics(TimelineMetrics metrics) throws SQLException, IOException;
+
+  /**
+   * Return all metrics metadata that have been written to the store.
+   * @return { appId : [ @TimelineMetricMetadata ] }
+   * @throws SQLException
+   * @throws IOException
+   */
+  Map<String, List<TimelineMetricMetadata>> getTimelineMetricMetadata() throws SQLException, IOException;
+
+  /**
+   * Returns all hosts that have written metrics with the apps on the host
+   * @return { hostname : [ appIds ] }
+   * @throws SQLException
+   * @throws IOException
+   */
+  Map<String, Set<String>> getHostAppsMetadata() throws SQLException, IOException;
 }
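For a sense of the shapes these two new methods return, a hedged example: field names follow the TimelineMetricMetadata XML elements earlier in this commit, 'HOST' is the built-in host appId, and the remaining values are invented for illustration.

metric_metadata_by_app = {
    'HOST': [
        {'metricname': 'cpu_user',
         'units': None,
         'type': 'GAUGE',
         'seriesStartTime': 1453939200000,
         'supportsAggregation': True},
    ],
}

host_apps_metadata = {
    'c6401.ambari.apache.org': ['HOST'],
}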

http://git-wip-us.apache.org/repos/asf/ambari/blob/646fb429/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricAggregatorFactory.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricAggregatorFactory.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricAggregatorFactory.java
index f0b2fda..cc85c56 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricAggregatorFactory.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricAggregatorFactory.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline
 import org.apache.commons.io.FilenameUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;
 
 import static java.util.concurrent.TimeUnit.SECONDS;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.CLUSTER_AGGREGATOR_DAILY_CHECKPOINT_CUTOFF_MULTIPLIER;
@@ -227,7 +228,8 @@ public class TimelineMetricAggregatorFactory {
    * Timeslice : 30 sec
    */
   public static TimelineMetricAggregator createTimelineClusterAggregatorSecond(
-    PhoenixHBaseAccessor hBaseAccessor, Configuration metricsConf) {
+    PhoenixHBaseAccessor hBaseAccessor, Configuration metricsConf,
+    TimelineMetricMetadataManager metadataManager) {
 
     String checkpointDir = metricsConf.get(
       TIMELINE_METRICS_AGGREGATOR_CHECKPOINT_DIR, DEFAULT_CHECKPOINT_LOCATION);
@@ -251,6 +253,7 @@ public class TimelineMetricAggregatorFactory {
     // Second based aggregation have added responsibility of time slicing
     return new TimelineMetricClusterAggregatorSecond(
       "TimelineClusterAggregatorSecond",
+      metadataManager,
       hBaseAccessor, metricsConf,
       checkpointLocation,
       sleepIntervalMillis,

http://git-wip-us.apache.org/repos/asf/ambari/blob/646fb429/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricAppAggregator.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricAppAggregator.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricAppAggregator.java
index 0c8ded2..05beb76 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricAppAggregator.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricAppAggregator.java
@@ -21,12 +21,17 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;
+
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
+
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.CLUSTER_AGGREGATOR_APP_IDS;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.HOST_APP_ID;
 
@@ -40,13 +45,13 @@ public class TimelineMetricAppAggregator {
   private static final Log LOG = LogFactory.getLog(TimelineMetricAppAggregator.class);
   // Lookup to check candidacy of an app
   private final List<String> appIdsToAggregate;
-  // Map to lookup apps on a host
-  private Map<String, List<String>> hostedAppsMap = new HashMap<String, List<String>>();
-
+  private final Map<String, Set<String>> hostedAppsMap;
   Map<TimelineClusterMetric, MetricClusterAggregate> aggregateClusterMetrics;
 
-  public TimelineMetricAppAggregator(Configuration metricsConf) {
+  public TimelineMetricAppAggregator(TimelineMetricMetadataManager metadataManager,
+                                     Configuration metricsConf) {
     appIdsToAggregate = getAppIdsForHostAggregation(metricsConf);
+    hostedAppsMap = metadataManager.getHostedAppsCache();
     LOG.info("AppIds configured for aggregation: " + appIdsToAggregate);
   }
 
@@ -67,15 +72,6 @@ public class TimelineMetricAppAggregator {
   }
 
   /**
-   * Useful for resetting apps that no-longer need aggregation without restart.
-   */
-  public void destroy() {
-    LOG.debug("Cleanup aggregated data as well as in-memory state.");
-    aggregateClusterMetrics = null;
-    hostedAppsMap = new HashMap<String, List<String>>();
-  }
-
-  /**
    * Calculate aggregates if the clusterMetric is a Host metric for recorded
    * apps that are housed by this host.
    *
@@ -101,9 +97,9 @@ public class TimelineMetricAppAggregator {
       // Build the hostedapps map if not a host metric
       // Check app candidacy for host aggregation
       if (appIdsToAggregate.contains(appId)) {
-        List<String> appIds = hostedAppsMap.get(hostname);
+        Set<String> appIds = hostedAppsMap.get(hostname);
         if (appIds == null) {
-          appIds = new ArrayList<String>();
+          appIds = new HashSet<>();
           hostedAppsMap.put(hostname, appIds);
         }
         if (!appIds.contains(appId)) {
@@ -126,7 +122,7 @@ public class TimelineMetricAppAggregator {
       return;
     }
 
-    List<String> apps = hostedAppsMap.get(hostname);
+    Set<String> apps = hostedAppsMap.get(hostname);
     for (String appId : apps) {
       // Add a new cluster aggregate metric if none exists
       TimelineClusterMetric appTimelineClusterMetric =

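For context, a minimal wiring sketch (not part of this patch) of how the reworked aggregator is constructed now that the hosted-apps map lives in the shared metadata manager rather than in a private HashMap that the removed destroy() call used to reset; obtaining the PhoenixHBaseAccessor is assumed to happen elsewhere in the timeline service.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricAppAggregator;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;

public class AppAggregatorWiringSketch {
  // hBaseAccessor is assumed to be supplied by the timeline metric store.
  static TimelineMetricAppAggregator wire(PhoenixHBaseAccessor hBaseAccessor,
                                          Configuration metricsConf) {
    // The manager owns the host -> appIds cache and syncs it back to the store.
    TimelineMetricMetadataManager metadataManager =
        new TimelineMetricMetadataManager(hBaseAccessor, metricsConf);
    metadataManager.initializeMetadata();

    // The aggregator now reads that shared cache, so app discovery state
    // survives across aggregation cycles.
    return new TimelineMetricAppAggregator(metadataManager, metricsConf);
  }
}
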
http://git-wip-us.apache.org/repos/asf/ambari/blob/646fb429/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricClusterAggregatorSecond.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricClusterAggregatorSecond.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricClusterAggregatorSecond.java
index b26d3f0..ec141e7 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricClusterAggregatorSecond.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricClusterAggregatorSecond.java
@@ -23,6 +23,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.Condition;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.DefaultCondition;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL;
@@ -50,7 +51,9 @@ public class TimelineMetricClusterAggregatorSecond extends AbstractTimelineAggre
   // 1 minute client side buffering adjustment
   private final Long serverTimeShiftAdjustment;
 
+
   public TimelineMetricClusterAggregatorSecond(String aggregatorName,
+                                               TimelineMetricMetadataManager metadataManager,
                                                PhoenixHBaseAccessor hBaseAccessor,
                                                Configuration metricsConf,
                                                String checkpointLocation,
@@ -65,7 +68,7 @@ public class TimelineMetricClusterAggregatorSecond extends AbstractTimelineAggre
       sleepIntervalMillis, checkpointCutOffMultiplier, aggregatorDisabledParam,
       tableName, outputTableName, nativeTimeRangeDelay);
 
-    appAggregator = new TimelineMetricAppAggregator(metricsConf);
+    appAggregator = new TimelineMetricAppAggregator(metadataManager, metricsConf);
     this.timeSliceIntervalMillis = timeSliceInterval;
     this.serverTimeShiftAdjustment = Long.parseLong(metricsConf.get(SERVER_SIDE_TIMESIFT_ADJUSTMENT, "90000"));
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/646fb429/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataKey.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataKey.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataKey.java
new file mode 100644
index 0000000..ec97ee5
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataKey.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery;
+
+public class TimelineMetricMetadataKey {
+  String metricName;
+  String appId;
+
+  public TimelineMetricMetadataKey(String metricName, String appId) {
+    this.metricName = metricName;
+    this.appId = appId;
+  }
+
+  public String getMetricName() {
+    return metricName;
+  }
+
+  public String getAppId() {
+    return appId;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+
+    TimelineMetricMetadataKey that = (TimelineMetricMetadataKey) o;
+
+    if (!metricName.equals(that.metricName)) return false;
+    return !(appId != null ? !appId.equals(that.appId) : that.appId != null);
+
+  }
+
+  @Override
+  public int hashCode() {
+    int result = metricName.hashCode();
+    result = 31 * result + (appId != null ? appId.hashCode() : 0);
+    return result;
+  }
+
+}

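As a quick illustration (not from the patch) of why the value-based equals/hashCode above matters: the metadata cache keys entries by (metricName, appId), so two separately constructed keys with the same fields must resolve to the same map entry. The metric name and value strings below are made up.

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataKey;

public class MetadataKeySketch {
  public static void main(String[] args) {
    ConcurrentMap<TimelineMetricMetadataKey, String> cache = new ConcurrentHashMap<>();

    // Two distinct instances with equal fields collapse to one entry.
    cache.put(new TimelineMetricMetadataKey("regionserver.Server.readRequestCount", "hbase"), "first");
    cache.put(new TimelineMetricMetadataKey("regionserver.Server.readRequestCount", "hbase"), "second");

    System.out.println(cache.size());  // 1
    System.out.println(cache.get(
        new TimelineMetricMetadataKey("regionserver.Server.readRequestCount", "hbase")));  // second
  }
}
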
http://git-wip-us.apache.org/repos/asf/ambari/blob/646fb429/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
new file mode 100644
index 0000000..1c1a1dc
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
@@ -0,0 +1,187 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.sink.timeline.MetadataException;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
+
+import java.sql.SQLException;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata.MetricType.UNDEFINED;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.DISABLE_METRIC_METADATA_MGMT;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.METRICS_METADATA_SYNC_INIT_DELAY;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.METRICS_METADATA_SYNC_SCHEDULE_DELAY;
+
+public class TimelineMetricMetadataManager {
+  private static final Log LOG = LogFactory.getLog(TimelineMetricMetadataManager.class);
+  private boolean isDisabled = false;
+  // Cache all metadata on retrieval
+  private final Map<TimelineMetricMetadataKey, TimelineMetricMetadata> METADATA_CACHE = new ConcurrentHashMap<>();
+  // Map to lookup apps on a host
+  private final Map<String, Set<String>> HOSTED_APPS_MAP = new ConcurrentHashMap<>();
+  // Sync only when needed
+  AtomicBoolean SYNC_HOSTED_APPS_METADATA = new AtomicBoolean(false);
+
+  // Single thread to sync back new writes to the store
+  private final ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor();
+
+  private PhoenixHBaseAccessor hBaseAccessor;
+  private Configuration metricsConf;
+
+  public TimelineMetricMetadataManager(PhoenixHBaseAccessor hBaseAccessor,
+                                       Configuration metricsConf) {
+    this.hBaseAccessor = hBaseAccessor;
+    this.metricsConf = metricsConf;
+  }
+
+  /**
+   * Initialize Metadata from the store
+   */
+  public void initializeMetadata() {
+    if (metricsConf.getBoolean(DISABLE_METRIC_METADATA_MGMT, false)) {
+      isDisabled = true;
+    } else {
+      // Schedule the executor to sync to store
+      executorService.scheduleWithFixedDelay(new TimelineMetricMetadataSync(this),
+        metricsConf.getInt(METRICS_METADATA_SYNC_INIT_DELAY, 120), // 2 minutes
+        metricsConf.getInt(METRICS_METADATA_SYNC_SCHEDULE_DELAY, 300), // 5 minutes
+        TimeUnit.SECONDS);
+      // Read from store and initialize map
+      try {
+        Map<TimelineMetricMetadataKey, TimelineMetricMetadata> metadata =
+          hBaseAccessor.getTimelineMetricMetadata();
+
+        LOG.info("Retrieved " + metadata.size() + ", metadata objects from store.");
+        // Store in the cache
+        METADATA_CACHE.putAll(metadata);
+
+        Map<String, Set<String>> hostedAppData = hBaseAccessor.getHostedAppsMetadata();
+
+        LOG.info("Retrieved " + hostedAppData.size() + " host objects from store.");
+        HOSTED_APPS_MAP.putAll(hostedAppData);
+
+      } catch (SQLException e) {
+        LOG.warn("Exception loading metric metadata", e);
+      }
+    }
+  }
+
+  public Map<TimelineMetricMetadataKey, TimelineMetricMetadata> getMetadataCache() {
+    return METADATA_CACHE;
+  }
+
+  public Map<String, Set<String>> getHostedAppsCache() {
+    return HOSTED_APPS_MAP;
+  }
+
+  public boolean syncHostedAppsMetadata() {
+    return SYNC_HOSTED_APPS_METADATA.get();
+  }
+
+  public void markSuccessOnSyncHostedAppsMetadata() {
+    SYNC_HOSTED_APPS_METADATA.set(false);
+  }
+
+  /**
+   * Update value in metadata cache
+   * @param metadata @TimelineMetricMetadata
+   */
+  public void putIfModifiedTimelineMetricMetadata(TimelineMetricMetadata metadata) {
+    TimelineMetricMetadataKey key = new TimelineMetricMetadataKey(
+      metadata.getMetricName(), metadata.getAppId());
+
+    TimelineMetricMetadata metadataFromCache = METADATA_CACHE.get(key);
+
+    if (metadataFromCache != null) {
+      try {
+        if (metadataFromCache.needsToBeSynced(metadata)) {
+          metadata.setIsPersisted(false); // Set the flag to ensure sync to store on next run
+          METADATA_CACHE.put(key, metadata);
+        }
+      } catch (MetadataException e) {
+        LOG.warn("Error inserting Metadata in cache.", e);
+      }
+
+    } else {
+      METADATA_CACHE.put(key, metadata);
+    }
+  }
+
+  /**
+   * Update value in hosted apps cache
+   * @param hostname Host name
+   * @param appId Application Id
+   */
+  public void putIfModifiedHostedAppsMetadata(String hostname, String appId) {
+    Set<String> apps = HOSTED_APPS_MAP.get(hostname);
+    if (apps == null) {
+      apps = new HashSet<>();
+      HOSTED_APPS_MAP.put(hostname, apps);
+    }
+
+    if (!apps.contains(appId)) {
+      apps.add(appId);
+      SYNC_HOSTED_APPS_METADATA.set(true);
+    }
+  }
+
+  public void persistMetadata(Collection<TimelineMetricMetadata> metadata) throws SQLException {
+    hBaseAccessor.saveMetricMetadata(metadata);
+  }
+
+  public void persistHostedAppsMetadata(Map<String, Set<String>> hostedApps) throws SQLException {
+    hBaseAccessor.saveHostAppsMetadata(hostedApps);
+  }
+
+  public TimelineMetricMetadata getTimelineMetricMetadata(TimelineMetric timelineMetric) {
+    return new TimelineMetricMetadata(
+      timelineMetric.getMetricName(),
+      timelineMetric.getAppId(),
+      timelineMetric.getType(), // Present type and unit are synonyms
+      UNDEFINED, // TODO: Add support for types in the application
+      timelineMetric.getStartTime(),
+      true
+    );
+  }
+
+  /**
+   * Fetch hosted apps from store
+   * @throws SQLException
+   */
+  Map<String, Set<String>> getPersistedHostedAppsData() throws SQLException {
+    return hBaseAccessor.getHostedAppsMetadata();
+  }
+
+  public boolean isDisabled() {
+    return isDisabled;
+  }
+}

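To make the write path above concrete, a hedged sketch of how a caller could feed discovery data into the manager as metrics arrive; the surrounding store that invokes this, and the incoming TimelineMetric, are assumed rather than shown in the patch.

import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;

public class MetadataDiscoverySketch {
  // metadataManager is assumed to be the single instance owned by the store.
  static void onMetricReceived(TimelineMetricMetadataManager metadataManager,
                               TimelineMetric metric) {
    if (metadataManager.isDisabled()) {
      return; // metadata management turned off via DISABLE_METRIC_METADATA_MGMT
    }

    // Build metadata from the metric and cache it; the entry is only re-marked
    // for the background sync when needsToBeSynced() reports a change.
    TimelineMetricMetadata metadata = metadataManager.getTimelineMetricMetadata(metric);
    metadataManager.putIfModifiedTimelineMetricMetadata(metadata);

    // Record which app reported from which host; flips the hosted-apps sync flag.
    metadataManager.putIfModifiedHostedAppsMetadata(metric.getHostName(), metric.getAppId());
  }
}
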
http://git-wip-us.apache.org/repos/asf/ambari/blob/646fb429/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataSync.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataSync.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataSync.java
new file mode 100644
index 0000000..54ea200
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataSync.java
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Sync metadata info with the store
+ */
+public class TimelineMetricMetadataSync implements Runnable {
+  private static final Log LOG = LogFactory.getLog(TimelineMetricMetadataSync.class);
+
+  private final TimelineMetricMetadataManager cacheManager;
+
+  public TimelineMetricMetadataSync(TimelineMetricMetadataManager cacheManager) {
+    this.cacheManager = cacheManager;
+  }
+
+  @Override
+  public void run() {
+    List<TimelineMetricMetadata> metadataToPersist = new ArrayList<>();
+    // Find all entries to persist
+    for (TimelineMetricMetadata metadata : cacheManager.getMetadataCache().values()) {
+      if (!metadata.isPersisted()) {
+        metadataToPersist.add(metadata);
+      }
+    }
+    boolean markSuccess = false;
+    if (!metadataToPersist.isEmpty()) {
+      try {
+        cacheManager.persistMetadata(metadataToPersist);
+        markSuccess = true;
+      } catch (SQLException e) {
+        LOG.warn("Error persisting metadata.", e);
+      }
+    }
+    // Mark corresponding entries as persisted to skip on next run
+    if (markSuccess) {
+      for (TimelineMetricMetadata metadata : metadataToPersist) {
+        TimelineMetricMetadataKey key = new TimelineMetricMetadataKey(
+          metadata.getMetricName(), metadata.getAppId()
+        );
+
+        // Mark entry as being persisted
+        metadata.setIsPersisted(true);
+        // Update cache
+        cacheManager.getMetadataCache().put(key, metadata);
+      }
+    }
+    // Sync hosted apps data is needed
+    if (cacheManager.syncHostedAppsMetadata()) {
+      Map<String, Set<String>> persistedData = null;
+      try {
+        persistedData = cacheManager.getPersistedHostedAppsData();
+      } catch (SQLException e) {
+        LOG.warn("Failed on fetching hosted apps data from store.", e);
+        return; // Something wrong with store
+      }
+
+      Map<String, Set<String>> cachedData = cacheManager.getHostedAppsCache();
+      Map<String, Set<String>> dataToSync = new HashMap<>();
+      if (cachedData != null && !cachedData.isEmpty()) {
+        for (Map.Entry<String, Set<String>> cacheEntry : cachedData.entrySet()) {
+          // No persistence / stale data in store
+          if (persistedData == null || persistedData.isEmpty() ||
+              !persistedData.containsKey(cacheEntry.getKey()) ||
+              !persistedData.get(cacheEntry.getKey()).containsAll(cacheEntry.getValue())) {
+            dataToSync.put(cacheEntry.getKey(), cacheEntry.getValue());
+          }
+        }
+        try {
+          cacheManager.persistHostedAppsMetadata(dataToSync);
+          cacheManager.markSuccessOnSyncHostedAppsMetadata();
+
+        } catch (SQLException e) {
+          LOG.warn("Error persisting hosted apps metadata.", e);
+        }
+      }
+
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/646fb429/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixTransactSQL.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixTransactSQL.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixTransactSQL.java
index fa9fd73..cd1bfb3 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixTransactSQL.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixTransactSQL.java
@@ -22,10 +22,12 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.sink.timeline.PrecisionLimitExceededException;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
 import org.apache.hadoop.metrics2.sink.timeline.Precision;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataKey;
 
 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.SQLException;
+import java.util.Collection;
 import java.util.concurrent.TimeUnit;
 
 /**
@@ -102,6 +104,23 @@ public class PhoenixTransactSQL {
       "SERVER_TIME)) DATA_BLOCK_ENCODING='%s', IMMUTABLE_ROWS=true, " +
       "TTL=%s, COMPRESSION='%s'";
 
+  public static final String CREATE_METRICS_METADATA_TABLE_SQL =
+    "CREATE TABLE IF NOT EXISTS METRICS_METADATA " +
+      "(METRIC_NAME VARCHAR, " +
+      "APP_ID VARCHAR, " +
+      "UNITS CHAR(20), " +
+      "TYPE CHAR(20), " +
+      "START_TIME UNSIGNED_LONG, " +
+      "SUPPORTS_AGGREGATION BOOLEAN " +
+      "CONSTRAINT pk PRIMARY KEY (METRIC_NAME, APP_ID)) " +
+      "DATA_BLOCK_ENCODING='%s', COMPRESSION='%s'";
+
+  public static final String CREATE_HOSTED_APPS_METADATA_TABLE_SQL =
+    "CREATE TABLE IF NOT EXISTS HOSTED_APPS_METADATA " +
+      "(HOSTNAME VARCHAR, APP_IDS VARCHAR, " +
+      "CONSTRAINT pk PRIMARY KEY (HOSTNAME))" +
+      "DATA_BLOCK_ENCODING='%s', COMPRESSION='%s'";
+
   /**
    * ALTER table to set new options
    */
@@ -148,6 +167,14 @@ public class PhoenixTransactSQL {
     "METRIC_COUNT) " +
     "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
 
+  public static final String UPSERT_METADATA_SQL =
+    "UPSERT INTO METRICS_METADATA (METRIC_NAME, APP_ID, UNITS, TYPE, " +
+      "START_TIME, SUPPORTS_AGGREGATION) " +
+      "VALUES (?, ?, ?, ?, ?, ?)";
+
+  public static final String UPSERT_HOSTED_APPS_METADATA_SQL =
+    "UPSERT INTO HOSTED_APPS_METADATA (HOSTNAME, APP_IDS) VALUES (?, ?)";
+
   /**
    * Retrieve a set of rows from metrics records table.
    */
@@ -217,6 +244,13 @@ public class PhoenixTransactSQL {
     "METRIC_MIN " +
     "FROM %s";
 
+  public static final String GET_METRIC_METADATA_SQL = "SELECT " +
+    "METRIC_NAME, APP_ID, UNITS, TYPE, START_TIME, " +
+    "SUPPORTS_AGGREGATION FROM METRICS_METADATA";
+
+  public static final String GET_HOSTED_APPS_METADATA_SQL = "SELECT " +
+    "HOSTNAME, APP_IDS FROM HOSTED_APPS_METADATA";
+
   /**
    * Aggregate host metrics using a GROUP BY clause to take advantage of
    * N - way parallel scan where N = number of regions.
@@ -491,8 +525,7 @@ public class PhoenixTransactSQL {
   }
 
   private static PreparedStatement setQueryParameters(PreparedStatement stmt,
-                                                      Condition condition)
-    throws SQLException {
+                                                      Condition condition) throws SQLException {
     int pos = 1;
     //For GET_LATEST_METRIC_SQL_SINGLE_HOST parameters should be set 2 times
     do {

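For reference, a hedged sketch of how the two new DDL templates might be expanded and run; the encoding/compression values and the source of the Phoenix connection are illustrative assumptions, not taken from this patch.

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;

import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_HOSTED_APPS_METADATA_TABLE_SQL;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_METRICS_METADATA_TABLE_SQL;

public class MetadataDdlSketch {
  // conn is assumed to come from the accessor's Phoenix connection provider.
  static void ensureMetadataTables(Connection conn) throws SQLException {
    String encoding = "FAST_DIFF";   // assumed defaults, for illustration only
    String compression = "SNAPPY";

    try (Statement stmt = conn.createStatement()) {
      stmt.executeUpdate(String.format(CREATE_METRICS_METADATA_TABLE_SQL, encoding, compression));
      stmt.executeUpdate(String.format(CREATE_HOSTED_APPS_METADATA_TABLE_SQL, encoding, compression));
    }
  }
}
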
http://git-wip-us.apache.org/repos/asf/ambari/blob/646fb429/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
index 51535b2..e9d77cc 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
@@ -20,11 +20,13 @@ package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
 
 import com.google.inject.Inject;
 import com.google.inject.Singleton;
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.metrics2.sink.timeline.PrecisionLimitExceededException;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
@@ -65,6 +67,7 @@ import java.util.Collection;
 import java.util.EnumSet;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
@@ -149,15 +152,15 @@ public class TimelineWebServices {
     TimelineEntities entities = null;
     try {
       entities = store.getEntities(
-          parseStr(entityType),
-          parseLongStr(limit),
-          parseLongStr(windowStart),
-          parseLongStr(windowEnd),
-          parseStr(fromId),
-          parseLongStr(fromTs),
-          parsePairStr(primaryFilter, ":"),
-          parsePairsStr(secondaryFilter, ",", ":"),
-          parseFieldsStr(fields, ","));
+        parseStr(entityType),
+        parseLongStr(limit),
+        parseLongStr(windowStart),
+        parseLongStr(windowEnd),
+        parseStr(fromId),
+        parseLongStr(fromTs),
+        parsePairStr(primaryFilter, ":"),
+        parsePairsStr(secondaryFilter, ",", ":"),
+        parseFieldsStr(fields, ","));
     } catch (NumberFormatException e) {
       throw new BadRequestException(
           "windowStart, windowEnd or limit is not a numeric value.");
@@ -339,11 +342,11 @@ public class TimelineWebServices {
    * @param precision Precision [ seconds, minutes, hours ]
    * @param limit limit on total number of {@link TimelineMetric} records
    *              retrieved.
-   * @return {@link TimelineMetrics}
+   * @return {@link @TimelineMetrics}
    */
   @GET
   @Path("/metrics")
-  @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
+  @Produces({ MediaType.APPLICATION_JSON })
   public TimelineMetrics getTimelineMetrics(
     @Context HttpServletRequest req,
     @Context HttpServletResponse res,
@@ -387,11 +390,41 @@ public class TimelineWebServices {
       throw new WebApplicationException(sql,
         Response.Status.INTERNAL_SERVER_ERROR);
     } catch (IOException io) {
-      throw new WebApplicationException(io,
-        Response.Status.INTERNAL_SERVER_ERROR);
+      throw new WebApplicationException(io, Response.Status.INTERNAL_SERVER_ERROR);
     }
   }
 
+  @GET
+  @Path("/metrics/metadata")
+  @Produces({ MediaType.APPLICATION_JSON })
+  public Map<String, List<TimelineMetricMetadata>> getTimelineMetricMetadata(
+    @Context HttpServletRequest req,
+    @Context HttpServletResponse res
+  ) {
+    init(res);
+
+    try {
+      return timelineMetricStore.getTimelineMetricMetadata();
+    } catch (Exception e) {
+      throw new WebApplicationException(e, Response.Status.INTERNAL_SERVER_ERROR);
+    }
+  }
+
+  @GET
+  @Path("/metrics/hosts")
+  @Produces({ MediaType.APPLICATION_JSON })
+  public Map<String, Set<String>> getHostedAppsMetadata(
+    @Context HttpServletRequest req,
+    @Context HttpServletResponse res
+  ) {
+    init(res);
+
+    try {
+      return timelineMetricStore.getHostAppsMetadata();
+    } catch (Exception e) {
+      throw new WebApplicationException(e, Response.Status.INTERNAL_SERVER_ERROR);
+    }
+  }
 
   /**
    * Store the given entities into the timeline store, and return the errors

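To show what the two new read-only endpoints expose, a hedged example of fetching them over HTTP; the collector host, port, and the /ws/v1/timeline prefix are assumptions based on a typical AMS collector setup and are not stated in this patch.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class MetadataEndpointsSketch {
  public static void main(String[] args) throws Exception {
    // Assumed collector address and path prefix; adjust for your cluster.
    String base = "http://ams-collector.example.com:6188/ws/v1/timeline";

    for (String path : new String[] {"/metrics/metadata", "/metrics/hosts"}) {
      HttpURLConnection conn = (HttpURLConnection) new URL(base + path).openConnection();
      conn.setRequestProperty("Accept", "application/json");
      try (BufferedReader reader = new BufferedReader(
          new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
        String line;
        while ((line = reader.readLine()) != null) {
          System.out.println(line); // JSON map keyed by appId / by hostname
        }
      } finally {
        conn.disconnect();
      }
    }
  }
}
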
http://git-wip-us.apache.org/repos/asf/ambari/blob/646fb429/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
index a8bbc73..524ed2b 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
@@ -44,6 +44,8 @@ import java.net.URL;
 import java.net.URLClassLoader;
 import java.sql.Connection;
 import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
 import java.sql.Statement;
 
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
@@ -156,15 +158,22 @@ public class TestApplicationHistoryServer {
 
     Connection connection = createNiceMock(Connection.class);
     Statement stmt = createNiceMock(Statement.class);
+    PreparedStatement preparedStatement = createNiceMock(PreparedStatement.class);
+    ResultSet rs = createNiceMock(ResultSet.class);
     mockStatic(DriverManager.class);
     expect(DriverManager.getConnection("jdbc:phoenix:localhost:2181:/ams-hbase-unsecure"))
       .andReturn(connection).anyTimes();
     expect(connection.createStatement()).andReturn(stmt).anyTimes();
+    expect(connection.prepareStatement(anyString())).andReturn(preparedStatement).anyTimes();
     suppress(method(Statement.class, "executeUpdate", String.class));
+    expect(preparedStatement.executeQuery()).andReturn(rs).anyTimes();
+    expect(rs.next()).andReturn(false).anyTimes();
+    preparedStatement.close();
+    expectLastCall().anyTimes();
     connection.close();
     expectLastCall();
 
-    EasyMock.replay(connection, stmt);
+    EasyMock.replay(connection, stmt, preparedStatement, rs);
     replayAll();
 
     historyServer = new ApplicationHistoryServer();

http://git-wip-us.apache.org/repos/asf/ambari/blob/646fb429/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java
index e73c741..8cbc56b 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java
@@ -99,10 +99,16 @@ public abstract class AbstractMiniHBaseClusterTest extends BaseTest {
       stmt = conn.createStatement();
 
       stmt.execute("delete from METRIC_AGGREGATE");
+      stmt.execute("delete from METRIC_AGGREGATE_MINUTE");
       stmt.execute("delete from METRIC_AGGREGATE_HOURLY");
+      stmt.execute("delete from METRIC_AGGREGATE_DAILY");
       stmt.execute("delete from METRIC_RECORD");
-      stmt.execute("delete from METRIC_RECORD_HOURLY");
       stmt.execute("delete from METRIC_RECORD_MINUTE");
+      stmt.execute("delete from METRIC_RECORD_HOURLY");
+      stmt.execute("delete from METRIC_RECORD_DAILY");
+      stmt.execute("delete from METRICS_METADATA");
+      stmt.execute("delete from HOSTED_APPS_METADATA");
+
       conn.commit();
     } finally {
       if (stmt != null) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/646fb429/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITPhoenixHBaseAccessor.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITPhoenixHBaseAccessor.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITPhoenixHBaseAccessor.java
index 5e7234c..0522f81 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITPhoenixHBaseAccessor.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITPhoenixHBaseAccessor.java
@@ -27,11 +27,13 @@ import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineClusterMetric;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricAggregator;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricAggregatorFactory;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.Condition;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.DefaultCondition;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+
 import java.io.IOException;
 import java.sql.Connection;
 import java.sql.SQLException;
@@ -41,6 +43,7 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+
 import static junit.framework.Assert.assertEquals;
 import static junit.framework.Assert.assertTrue;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.MetricTestHelper.createEmptyTimelineClusterMetric;
@@ -204,7 +207,8 @@ public class ITPhoenixHBaseAccessor extends AbstractMiniHBaseClusterTest {
   public void testGetClusterMetricRecordsSeconds() throws Exception {
     // GIVEN
     TimelineMetricAggregator agg =
-      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb, new Configuration());
+      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(
+        hdb, new Configuration(), new TimelineMetricMetadataManager(hdb, new Configuration()));
 
     long startTime = System.currentTimeMillis();
     long ctime = startTime + 1;
@@ -243,7 +247,8 @@ public class ITPhoenixHBaseAccessor extends AbstractMiniHBaseClusterTest {
   public void testGetClusterMetricRecordLatestWithFunction() throws Exception {
     // GIVEN
     TimelineMetricAggregator agg =
-      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb, new Configuration());
+      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond
+        (hdb, new Configuration(), new TimelineMetricMetadataManager(hdb, new Configuration()));
 
     long startTime = System.currentTimeMillis();
     long ctime = startTime + 1;

http://git-wip-us.apache.org/repos/asf/ambari/blob/646fb429/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java
index 7c8138b..8f8067b 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java
@@ -19,13 +19,17 @@ package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline
 
 import org.apache.hadoop.metrics2.sink.timeline.Precision;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
 import java.io.IOException;
 import java.sql.SQLException;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
+import java.util.Set;
 import java.util.TreeMap;
 
 public class TestTimelineMetricStore implements TimelineMetricStore {
@@ -80,4 +84,14 @@ public class TestTimelineMetricStore implements TimelineMetricStore {
 
     return new TimelinePutResponse();
   }
+
+  @Override
+  public Map<String, List<TimelineMetricMetadata>> getTimelineMetricMetadata() throws SQLException, IOException {
+    return null;
+  }
+
+  @Override
+  public Map<String, Set<String>> getHostAppsMetadata() throws SQLException, IOException {
+    return Collections.emptyMap();
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/646fb429/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/ITClusterAggregator.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/ITClusterAggregator.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/ITClusterAggregator.java
index 6672dae..f201224 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/ITClusterAggregator.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/ITClusterAggregator.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricAggregator;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricAggregatorFactory;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricReadHelper;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.Condition;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.DefaultCondition;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL;
@@ -75,7 +76,8 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
   public void testShouldAggregateClusterProperly() throws Exception {
     // GIVEN
     TimelineMetricAggregator agg =
-      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb, getConfigurationForTest(false));
+      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb,
+        getConfigurationForTest(false), new TimelineMetricMetadataManager(hdb, new Configuration()));
     TimelineMetricReadHelper readHelper = new TimelineMetricReadHelper(false);
 
     long startTime = System.currentTimeMillis();
@@ -127,7 +129,8 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
   public void testShouldAggregateClusterIgnoringInstance() throws Exception {
     // GIVEN
     TimelineMetricAggregator agg =
-      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb, getConfigurationForTest(false));
+      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb,
+        getConfigurationForTest(false), new TimelineMetricMetadataManager(hdb, new Configuration()));
     TimelineMetricReadHelper readHelper = new TimelineMetricReadHelper(false);
 
     long startTime = System.currentTimeMillis();
@@ -202,7 +205,8 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
   public void testShouldAggregateDifferentMetricsOnClusterProperly() throws Exception {
     // GIVEN
     TimelineMetricAggregator agg =
-      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb, getConfigurationForTest(false));
+      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb,
+        getConfigurationForTest(false), new TimelineMetricMetadataManager(hdb, new Configuration()));
     TimelineMetricReadHelper readHelper = new TimelineMetricReadHelper(false);
 
     // here we put some metrics tha will be aggregated
@@ -485,7 +489,8 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
     Configuration conf = getConfigurationForTest(false);
     conf.set(CLUSTER_AGGREGATOR_APP_IDS, "app1");
     TimelineMetricAggregator agg =
-      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb, conf);
+      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb,
+        conf, new TimelineMetricMetadataManager(hdb, new Configuration()));
     TimelineMetricReadHelper readHelper = new TimelineMetricReadHelper(false);
 
     long startTime = System.currentTimeMillis();
@@ -536,7 +541,8 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
   @Test
   public void testClusterAggregateMetricNormalization() throws Exception {
     TimelineMetricAggregator agg =
-      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb, getConfigurationForTest(false));
+      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb,
+        getConfigurationForTest(false), new TimelineMetricMetadataManager(hdb, new Configuration()));
     TimelineMetricReadHelper readHelper = new TimelineMetricReadHelper(false);
 
     // Sample data

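Tying the integration-test changes together, a hedged sketch of the new factory call shape the tests above use; hdb and the configuration are assumed to be provided by the surrounding test or service.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricAggregator;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricAggregatorFactory;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;

public class ClusterAggregatorFactorySketch {
  static TimelineMetricAggregator createSecondAggregator(PhoenixHBaseAccessor hdb,
                                                         Configuration conf) {
    // Same argument order as the updated tests: accessor, configuration,
    // then the metadata manager that backs the shared hosted-apps cache.
    return TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(
        hdb, conf, new TimelineMetricMetadataManager(hdb, conf));
  }
}
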

[27/39] ambari git commit: AMBARI-14848 : AMS service has critical alert after upgrade to 2.2.1.0. (avijayan)

Posted by nc...@apache.org.
AMBARI-14848 : AMS service has critical alert after upgrade to 2.2.1.0. (avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/44c98665
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/44c98665
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/44c98665

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 44c98665a4bf9c76434741d1bdae1f1a35145f45
Parents: 69c8341
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Fri Jan 29 14:10:01 2016 -0800
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Fri Jan 29 14:10:01 2016 -0800

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog221.java       |  7 ++++
 .../server/upgrade/UpgradeCatalog221Test.java   | 43 ++++++++++++++++++++
 2 files changed, 50 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/44c98665/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
index 13ec345..0f9d0e0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
@@ -198,6 +198,13 @@ public class UpgradeCatalog221 extends AbstractUpgradeCatalog {
         alertDefinitionDAO.merge(alertDefinition);
       }
 
+      final AlertDefinitionEntity amsZookeeperProcessAlertDefinitionEntity = alertDefinitionDAO.findByName(
+        clusterID, "ams_metrics_collector_zookeeper_server_process");
+
+      if (amsZookeeperProcessAlertDefinitionEntity != null) {
+        LOG.info("Removing alert : ams_metrics_collector_zookeeper_server_process");
+        alertDefinitionDAO.remove(amsZookeeperProcessAlertDefinitionEntity);
+      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/44c98665/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
index c15fbd8..e754a41 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
@@ -39,7 +39,9 @@ import org.apache.ambari.server.controller.KerberosHelper;
 import org.apache.ambari.server.controller.MaintenanceStateHelper;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
 import org.apache.ambari.server.orm.dao.StackDAO;
+import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
 import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -560,4 +562,45 @@ public class UpgradeCatalog221Test {
     String result = (String) updateAmsEnvContent.invoke(upgradeCatalog221, oldContent);
     Assert.assertEquals(expectedContent, result);
   }
+
+  @Test
+  public void testUpdateAlertDefinitions() {
+    EasyMockSupport easyMockSupport = new EasyMockSupport();
+    long clusterId = 1;
+
+    final AmbariManagementController mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);
+    final AlertDefinitionDAO mockAlertDefinitionDAO = easyMockSupport.createNiceMock(AlertDefinitionDAO.class);
+    final Clusters mockClusters = easyMockSupport.createStrictMock(Clusters.class);
+    final Cluster mockClusterExpected = easyMockSupport.createNiceMock(Cluster.class);
+    final AlertDefinitionEntity mockAmsZookeeperProcessAlertDefinitionEntity = easyMockSupport.createNiceMock(AlertDefinitionEntity.class);
+
+    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        bind(AmbariManagementController.class).toInstance(mockAmbariManagementController);
+        bind(Clusters.class).toInstance(mockClusters);
+        bind(EntityManager.class).toInstance(entityManager);
+        bind(AlertDefinitionDAO.class).toInstance(mockAlertDefinitionDAO);
+        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+        bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+      }
+    });
+
+    expect(mockAmbariManagementController.getClusters()).andReturn(mockClusters).once();
+    expect(mockClusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+      put("normal", mockClusterExpected);
+    }}).atLeastOnce();
+
+    expect(mockClusterExpected.getClusterId()).andReturn(clusterId).anyTimes();
+
+    expect(mockAlertDefinitionDAO.findByName(eq(clusterId), eq("ams_metrics_collector_zookeeper_server_process")))
+      .andReturn(mockAmsZookeeperProcessAlertDefinitionEntity).atLeastOnce();
+
+    mockAlertDefinitionDAO.remove(mockAmsZookeeperProcessAlertDefinitionEntity);
+    expectLastCall().once();
+
+    easyMockSupport.replayAll();
+    mockInjector.getInstance(UpgradeCatalog221.class).updateAlerts();
+    easyMockSupport.verifyAll();
+  }
 }


[12/39] ambari git commit: AMBARI-14829 Memory leak on Alerts page. (ababiichuk)

Posted by nc...@apache.org.
AMBARI-14829 Memory leak on Alerts page. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f10d41cc
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f10d41cc
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f10d41cc

Branch: refs/heads/branch-dev-patch-upgrade
Commit: f10d41ccbc935cdad2627f51a7b3ddba38190abf
Parents: d057581
Author: ababiichuk <ab...@hortonworks.com>
Authored: Thu Jan 28 16:01:19 2016 +0200
Committer: ababiichuk <ab...@hortonworks.com>
Committed: Thu Jan 28 16:01:19 2016 +0200

----------------------------------------------------------------------
 .../main/alert_definitions_controller.js        | 41 ++++++++++++
 .../mappers/alert_definition_summary_mapper.js  |  8 ++-
 .../app/models/alerts/alert_definition.js       |  1 +
 ambari-web/app/templates/main/alerts.hbs        | 32 ++--------
 .../alert_definition/alert_definition_state.hbs | 31 ++++++++++
 .../alert_definition_summary.hbs                | 28 +++++++++
 ambari-web/app/utils/ember_reopen.js            | 14 +++++
 ambari-web/app/views.js                         |  2 +
 ambari-web/app/views/common/sort_view.js        | 19 +++++-
 .../app/views/main/alert_definitions_view.js    | 23 +------
 .../alert_definition/alert_definition_state.js  | 34 ++++++++++
 .../alert_definition_summary.js                 | 65 ++++++++++++++++++++
 12 files changed, 245 insertions(+), 53 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f10d41cc/ambari-web/app/controllers/main/alert_definitions_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/alert_definitions_controller.js b/ambari-web/app/controllers/main/alert_definitions_controller.js
index 58eecf5..0cfff21 100644
--- a/ambari-web/app/controllers/main/alert_definitions_controller.js
+++ b/ambari-web/app/controllers/main/alert_definitions_controller.js
@@ -28,6 +28,8 @@ App.MainAlertDefinitionsController = Em.ArrayController.extend({
    */
   showFilterConditionsFirstLoad: false,
 
+  contentUpdater: null,
+
   /**
    * List of all <code>App.AlertDefinition</code>
    * @type {App.AlertDefinition[]}
@@ -35,6 +37,45 @@ App.MainAlertDefinitionsController = Em.ArrayController.extend({
   content: App.AlertDefinition.find(),
 
   /**
+   * Generates key for alert summary that represents current state
+   */
+  getSummaryCache: function () {
+    var res = '';
+    this.get('content').forEach(function(o) {
+      var summary = o.get('summary');
+      o.get('order').forEach(function (state) {
+        res += summary[state] ? summary[state].count + summary[state].maintenanceCount : 0;
+      });
+    });
+
+    return res;
+   },
+
+  generateCacheByKey: function(key) {
+    if (key === 'summary') {
+      return this.getSummaryCache();
+    }
+
+    return this.get('content').mapProperty(key).join('');
+  },
+
+  contentWasChanged: function(key) {
+    var updatedCache = this.generateCacheByKey(key);
+    if (this.get('cache.' + key) !== updatedCache) {
+      this.set('cache.' + key, updatedCache);
+      this.propertyDidChange('contentUpdater');
+    }
+  },
+
+  cache: {
+    'label': '',
+    'summary': '',
+    'serviceName': '',
+    'lastTriggered': '',
+    'enabled': ''
+  },
+
+  /**
    * Enable/disable alertDefinition confirmation popup
    * @param {object} event
    * @method toggleState

http://git-wip-us.apache.org/repos/asf/ambari/blob/f10d41cc/ambari-web/app/mappers/alert_definition_summary_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/alert_definition_summary_mapper.js b/ambari-web/app/mappers/alert_definition_summary_mapper.js
index 3977518..69f1b16 100644
--- a/ambari-web/app/mappers/alert_definition_summary_mapper.js
+++ b/ambari-web/app/mappers/alert_definition_summary_mapper.js
@@ -56,6 +56,9 @@ App.alertDefinitionSummaryMapper = App.QuickDataMapper.create({
 
     alertDefinitions.forEach(function (d) {
       var id = d.get('id');
+      if ((alertDefinitionsMap[id].get('stateManager.currentState.name') !== 'saved')) {
+        alertDefinitionsMap[id].get('stateManager').transitionTo('saved');
+      }
       alertDefinitionsMap[id].setProperties(summaryMap[id]);
       if (!alertDefinitionsMap[id].get('enabled')) {
         // clear summary for disabled alert definitions
@@ -89,7 +92,10 @@ App.alertDefinitionSummaryMapper = App.QuickDataMapper.create({
         });
       }
     });
-
+    if (!$.mocho) {
+      //for some reasons this causing error in unit test
+      App.store.commit();
+    }
     console.timeEnd('App.alertDefinitionSummaryMapper execution time');
 
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/f10d41cc/ambari-web/app/models/alerts/alert_definition.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/alerts/alert_definition.js b/ambari-web/app/models/alerts/alert_definition.js
index b33cc91..3f59e86 100644
--- a/ambari-web/app/models/alerts/alert_definition.js
+++ b/ambari-web/app/models/alerts/alert_definition.js
@@ -345,3 +345,4 @@ App.AlertDefinition.FIXTURES = [];
 App.AlertReportDefinition.FIXTURES = [];
 App.AlertMetricsSourceDefinition.FIXTURES = [];
 App.AlertMetricsUriDefinition.FIXTURES = [];
+App.AlertDefinitionParameter.FIXTURES = [];

http://git-wip-us.apache.org/repos/asf/ambari/blob/f10d41cc/ambari-web/app/templates/main/alerts.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/alerts.hbs b/ambari-web/app/templates/main/alerts.hbs
index 40bac06..8a27056 100644
--- a/ambari-web/app/templates/main/alerts.hbs
+++ b/ambari-web/app/templates/main/alerts.hbs
@@ -57,39 +57,15 @@
               <span {{bindAttr title="alertDefinition.type"}} {{bindAttr class=":type-icon  alertDefinition.typeIconClass"}}></span>
               <a href="#" {{action "gotoAlertDetails" alertDefinition}}>{{alertDefinition.label}}</a>
             </td>
-            <td class="alert-status">{{{alertDefinition.status}}}</td>
+            <td class="alert-status">
+              {{view App.AlertDefinitionSummary contentBinding="alertDefinition"}}
+            </td>
             <td class="alert-service">{{alertDefinition.serviceDisplayName}}</td>
             <td class="alert-time">
               <time class="timeago" {{bindAttr data-original-title="alertDefinition.lastTriggeredFormatted"}}>{{alertDefinition.lastTriggeredAgoFormatted}}</time>
             </td>
             <td class="last toggle-state-button alert-state">
-              {{#if alertDefinition.enabled not=true}}
-                {{#isAuthorized "CLUSTER.TOGGLE_ALERTS"}}
-                  <a href="#" {{action "toggleState" alertDefinition target="controller"}} {{bindAttr class="alertDefinition.enabled:alert-definition-enable:alert-definition-disable"}}>
-                  <span class="enable-disable-button" {{bindAttr data-original-title="view.enabledTooltip"}}>
-                    {{view.enabledDisplay}}
-                  </span>
-                  </a>
-                {{/isAuthorized}}
-                {{#isNotAuthorized "CLUSTER.TOGGLE_ALERTS"}}
-                    <span {{bindAttr class="alertDefinition.enabled:alert-definition-enable:alert-definition-disable"}}>
-                      {{view.enabledDisplay}}
-                    </span>
-                {{/isNotAuthorized}}
-              {{else}}
-                {{#isAuthorized "CLUSTER.TOGGLE_ALERTS"}}
-                  <a href="#" {{action "toggleState" alertDefinition target="controller"}} {{bindAttr class="alertDefinition.enabled:alert-definition-enable:alert-definition-disable"}}>
-                  <span class="enable-disable-button" {{bindAttr data-original-title="view.disabledTooltip"}}>
-                    {{view.disabledDisplay}}
-                  </span>
-                  </a>
-                {{/isAuthorized}}
-                {{#isNotAuthorized "CLUSTER.TOGGLE_ALERTS"}}
-                  <span {{bindAttr class="alertDefinition.enabled:alert-definition-enable:alert-definition-disable"}}>
-                    {{view.disabledDisplay}}
-                  </span>
-                {{/isNotAuthorized}}
-              {{/if}}
+              {{view App.AlertDefinitionState contentBinding="alertDefinition"}}
             </td>
           </tr>
         {{/each}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f10d41cc/ambari-web/app/templates/main/alerts/alert_definition/alert_definition_state.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/alerts/alert_definition/alert_definition_state.hbs b/ambari-web/app/templates/main/alerts/alert_definition/alert_definition_state.hbs
new file mode 100644
index 0000000..ebc5c04
--- /dev/null
+++ b/ambari-web/app/templates/main/alerts/alert_definition/alert_definition_state.hbs
@@ -0,0 +1,31 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{#isAuthorized "CLUSTER.TOGGLE_ALERTS"}}
+  <a href="#" {{action "toggleState" view.content target="controller"}}
+    {{bindAttr class="view.content.enabled:alert-definition-enable:alert-definition-disable"}}>
+      <span class="enable-disable-button" {{bindAttr data-original-title="view.tooltipText"}}>
+        {{view.labelText}}
+      </span>
+  </a>
+{{/isAuthorized}}
+{{#isNotAuthorized "CLUSTER.TOGGLE_ALERTS"}}
+  <span {{bindAttr class="view.content.enabled:alert-definition-enable:alert-definition-disable"}}>
+    {{view.labelText}}
+  </span>
+{{/isNotAuthorized}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/f10d41cc/ambari-web/app/templates/main/alerts/alert_definition/alert_definition_summary.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/alerts/alert_definition/alert_definition_summary.hbs b/ambari-web/app/templates/main/alerts/alert_definition/alert_definition_summary.hbs
new file mode 100644
index 0000000..930d440
--- /dev/null
+++ b/ambari-web/app/templates/main/alerts/alert_definition/alert_definition_summary.hbs
@@ -0,0 +1,28 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{#if view.hostCount}}
+  {{#each state in view.states}}
+    <span {{bindAttr class=":alert-state-single-host :label state.stateClass"}}>
+      {{#if state.isMaintenance}}<span class="icon-medkit"></span>{{/if}}
+      {{state.shortStateWithCounter}}
+    </span>
+  {{/each}}
+{{else}}
+  <span class="alert-state-single-host label alert-state-PENDING">NONE</span>
+{{/if}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/f10d41cc/ambari-web/app/utils/ember_reopen.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/ember_reopen.js b/ambari-web/app/utils/ember_reopen.js
index 0eefb97..512b3da 100644
--- a/ambari-web/app/utils/ember_reopen.js
+++ b/ambari-web/app/utils/ember_reopen.js
@@ -224,6 +224,20 @@ Em.View.reopen({
   }
 });
 
+Ember._HandlebarsBoundView.reopen({
+  /**
+   * overridden render method of Ember._HandlebarsBoundView to avoid uncaught errors
+   * when trying to render a destroyed view
+   */
+  render: function(buffer){
+    if(!this.get('isDestroyed') && !this.get('isDestroying')){
+      this._super(buffer);
+    } else {
+      console.debug('Calling set on destroyed view');
+    }
+  }
+});
+
 Ember.TextArea.reopen({
   attributeBindings: ['readonly']
 });
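
The reopen above is the usual "is the view still alive?" guard applied to bound renders. The same idiom is useful for any deferred work that may outlive its view; a short sketch with a hypothetical view (App.SomeView is illustrative only):

    App.SomeView = Em.View.extend({
      didInsertElement: function () {
        // deferred work scheduled from a view should re-check the destroy flags
        Em.run.later(this, function () {
          if (this.get('isDestroyed') || this.get('isDestroying')) {
            return; // the element is gone, nothing to update
          }
          this.set('ready', true);
        }, 500);
      }
    });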

http://git-wip-us.apache.org/repos/asf/ambari/blob/f10d41cc/ambari-web/app/views.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views.js b/ambari-web/app/views.js
index 78b79b4..d3c7fdc 100644
--- a/ambari-web/app/views.js
+++ b/ambari-web/app/views.js
@@ -95,6 +95,8 @@ require('views/login');
 require('views/main');
 require('views/main/menu');
 require('views/main/alert_definitions_view');
+require('views/main/alerts/alert_definition/alert_definition_summary');
+require('views/main/alerts/alert_definition/alert_definition_state');
 require('views/main/alerts/definition_details_view');
 require('views/main/alerts/alert_definitions_actions_view');
 require('views/main/alerts/definition_configs_view');

http://git-wip-us.apache.org/repos/asf/ambari/blob/f10d41cc/ambari-web/app/views/common/sort_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/sort_view.js b/ambari-web/app/views/common/sort_view.js
index 08471dd..011ddfc 100644
--- a/ambari-web/app/views/common/sort_view.js
+++ b/ambari-web/app/views/common/sort_view.js
@@ -119,7 +119,7 @@ var wrapperView = Em.View.extend({
         }
       }, this);
     }
-  }.observes('content.length'),
+  }.observes('controller.contentUpdater'),
 
   /**
    * reset all sorts fields
@@ -186,11 +186,24 @@ var wrapperView = Em.View.extend({
   },
 
   /**
   * method that runs <code>contentWasChanged</code>
+   *
+   * @method onContentChangeOnce
+   */
+  onContentChangeOnce: function() {
+    var keys = arguments[1].match(/[a-zA-Z]+$/),
+      key = keys.length ? keys[0] : null;
+    if (key) {
+      Em.run.once(this.get('controller'), 'contentWasChanged', key);
+    }
+  },
+
+  /**
    * Add observer for key to call  <code>onContentChange</code>
    * @param key
    */
   addSortingObserver: function (key) {
-    this.addObserver('content.@each.' + key, this, 'onContentChange');
+    this.addObserver('controller.content.@each.' + key, this, 'onContentChangeOnce');
   },
 
   /**
@@ -198,7 +211,7 @@ var wrapperView = Em.View.extend({
    * @param key
    */
   removeSortingObserver: function (key) {
-    this.removeObserver('content.@each.' + key, this, 'onContentChange');
+    this.removeObserver('controller.content.@each.' + key, this, 'onContentChangeOnce');
   },
 
   willDestroyElement: function () {
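
The key point of the sort_view change is that Em.run.once collapses many observer firings into one call per run loop for a given target/method pair, so a burst of @each notifications triggers contentWasChanged only once. A minimal, self-contained illustration (SortTarget is an illustrative object, not project code):

    var SortTarget = Em.Object.extend({
      calls: 0,
      contentWasChanged: function (key) {
        this.incrementProperty('calls');
      }
    });
    var controller = SortTarget.create();

    Em.run(function () {
      // three notifications arrive in the same run loop...
      Em.run.once(controller, 'contentWasChanged', 'label');
      Em.run.once(controller, 'contentWasChanged', 'label');
      Em.run.once(controller, 'contentWasChanged', 'label');
    });
    // ...but the method executes once: controller.get('calls') === 1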

http://git-wip-us.apache.org/repos/asf/ambari/blob/f10d41cc/ambari-web/app/views/main/alert_definitions_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/alert_definitions_view.js b/ambari-web/app/views/main/alert_definitions_view.js
index 8056347..b65b8cf 100644
--- a/ambari-web/app/views/main/alert_definitions_view.js
+++ b/ambari-web/app/views/main/alert_definitions_view.js
@@ -52,6 +52,7 @@ App.MainAlertDefinitionsView = App.TableView.extend({
   },
 
   willDestroyElement: function () {
+    $(".timeago").tooltip('destroy');
     this.removeObserver('pageContent.length', this, 'tooltipsUpdater');
   },
 
@@ -78,26 +79,6 @@ App.MainAlertDefinitionsView = App.TableView.extend({
 
   colPropAssoc: ['', 'label', 'summary', 'serviceName', 'type', 'lastTriggered', 'enabled', 'groups'],
 
-  /**
-   * @type {string}
-   */
-  enabledTooltip: Em.I18n.t('alerts.table.state.enabled.tooltip'),
-
-  /**
-   * @type {string}
-   */
-  disabledTooltip: Em.I18n.t('alerts.table.state.disabled.tooltip'),
-
-  /**
-   * @type {string}
-   */
-  enabledDisplay: Em.I18n.t('alerts.table.state.enabled'),
-
-  /**
-   * @type {string}
-   */
-  disabledDisplay: Em.I18n.t('alerts.table.state.disabled'),
-
   sortView: sort.wrapperView.extend({
     didInsertElement: function () {
       this._super();
@@ -500,7 +481,7 @@ App.MainAlertDefinitionsView = App.TableView.extend({
    */
   tooltipsUpdater: function () {
     Em.run.next(this, function () {
-      App.tooltip($(".enable-disable-button, .timeago"));
+      App.tooltip($(".timeago"));
     });
   },
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/f10d41cc/ambari-web/app/views/main/alerts/alert_definition/alert_definition_state.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/alerts/alert_definition/alert_definition_state.js b/ambari-web/app/views/main/alerts/alert_definition/alert_definition_state.js
new file mode 100644
index 0000000..e3d60a6
--- /dev/null
+++ b/ambari-web/app/views/main/alerts/alert_definition/alert_definition_state.js
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+var App = require('app');
+
+App.AlertDefinitionState = Em.View.extend({
+
+  templateName: require('templates/main/alerts/alert_definition/alert_definition_state'),
+
+  labelText: Em.computed.ifThenElse('content.enabled', Em.I18n.t('alerts.table.state.enabled'), Em.I18n.t('alerts.table.state.disabled')),
+
+  tooltipText: Em.computed.ifThenElse('content.enabled', Em.I18n.t('alerts.table.state.enabled.tooltip'), Em.I18n.t('alerts.table.state.disabled.tooltip')),
+
+  didInsertElement: function () {
+    App.tooltip(this.$(".enable-disable-button"));
+  },
+  willDestroyElement:function () {
+    this.$(".enable-disable-button").tooltip('destroy');
+  }
+});
\ No newline at end of file
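
Em.computed.ifThenElse used above picks one of two values based on the truthiness of the dependent key; labelText is roughly equivalent to the classic computed-property form (sketch only):

    labelText: function () {
      return this.get('content.enabled')
        ? Em.I18n.t('alerts.table.state.enabled')
        : Em.I18n.t('alerts.table.state.disabled');
    }.property('content.enabled')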

http://git-wip-us.apache.org/repos/asf/ambari/blob/f10d41cc/ambari-web/app/views/main/alerts/alert_definition/alert_definition_summary.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/alerts/alert_definition/alert_definition_summary.js b/ambari-web/app/views/main/alerts/alert_definition/alert_definition_summary.js
new file mode 100644
index 0000000..ecf262c
--- /dev/null
+++ b/ambari-web/app/views/main/alerts/alert_definition/alert_definition_summary.js
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+var App = require('app');
+
+App.AlertDefinitionSummary = Em.View.extend({
+
+  templateName: require('templates/main/alerts/alert_definition/alert_definition_summary'),
+
+  didInsertElement: function() {
+    this.stateObserver();
+  },
+
+  hostCount: 0,
+  states: [],
+
+  stateObserver: function () {
+    var order = this.get('content.order'),
+      summary = this.get('content.summary'),
+      shortState = this.get('content.shortState');
+
+    var hostCnt = 0;
+    order.forEach(function (state) {
+      hostCnt += summary[state] ? summary[state].count + summary[state].maintenanceCount : 0;
+    });
+    var states = [];
+    if (hostCnt) {
+      order.forEach(function (state) {
+        if (summary[state]) {
+          if (summary[state].count) {
+            states.push({
+              'shortStateWithCounter': shortState[state] + (summary[state].count > 1 ? ' (' + summary[state].count + ')' : ''),
+              'isMaintenance': false,
+              'stateClass': 'alert-state-' + state
+            });
+          }
+          if (summary[state].maintenanceCount) {
+            states.push({
+              'shortStateWithCounter': shortState[state] + (summary[state].maintenanceCount > 1 ? ' (' + summary[state].maintenanceCount + ')' : ''),
+              'isMaintenance': true,
+              'stateClass': 'alert-state-PENDING'
+            });
+          }
+        }
+      }, this);
+    }
+    this.set('hostCount', hostCnt);
+    this.set('states', states);
+  }.observes('content.summary')
+
+});
\ No newline at end of file
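
To make the summary view's data flow concrete, here is roughly what stateObserver produces for a small input (values are illustrative):

    var view = App.AlertDefinitionSummary.create({
      content: Em.Object.create({
        order: ['CRITICAL'],
        shortState: { CRITICAL: 'CRIT' },
        summary: { CRITICAL: { count: 2, maintenanceCount: 1 } }
      })
    });
    view.stateObserver();
    // view.get('hostCount') === 3
    // view.get('states') ===
    //   [ { shortStateWithCounter: 'CRIT (2)', isMaintenance: false, stateClass: 'alert-state-CRITICAL' },
    //     { shortStateWithCounter: 'CRIT',     isMaintenance: true,  stateClass: 'alert-state-PENDING' } ]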


[02/39] ambari git commit: AMBARI-14815 Unit tests for Oozie are not executed (test run is successful) (dsen)

Posted by nc...@apache.org.
AMBARI-14815 Unit tests for Oozie are not executed (test run is successful) (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ce725290
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ce725290
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ce725290

Branch: refs/heads/branch-dev-patch-upgrade
Commit: ce7252909acddde2d8872e4944e1bd52148dab03
Parents: 54311b7
Author: Dmytro Sen <ds...@apache.org>
Authored: Wed Jan 27 18:33:34 2016 +0200
Committer: Dmytro Sen <ds...@apache.org>
Committed: Wed Jan 27 18:33:34 2016 +0200

----------------------------------------------------------------------
 .../OOZIE/4.0.0.2.0/package/scripts/oozie.py    |   2 +-
 .../stacks/2.0.6/OOZIE/test_oozie_client.py     |   6 +-
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     | 117 +++++++++++++------
 .../stacks/2.0.6/OOZIE/test_service_check.py    |   4 -
 ambari-server/src/test/python/unitTests.py      |  14 ++-
 5 files changed, 95 insertions(+), 48 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ce725290/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
index 5fc2cfb..24a35ad 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
@@ -116,7 +116,7 @@ def oozie(is_server=False):
 
   # On some OS this folder could be not exists, so we will create it before pushing there files
   Directory(params.limits_conf_dir,
-            recursive=True,
+            create_parents=True,
             owner='root',
             group='root'
   )

http://git-wip-us.apache.org/repos/asf/ambari/blob/ce725290/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py
index 4c1a9bc..aab9bb0 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py
@@ -55,7 +55,7 @@ class TestOozieClient(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/security/limits.d',
                               owner = 'root',
                               group = 'root',
-                              recursive=True,
+                              create_parents=True,
                               )
     self.assertResourceCalled('File', '/etc/security/limits.d/oozie.conf',
                               owner = 'root',
@@ -121,7 +121,7 @@ class TestOozieClient(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/security/limits.d',
                               owner = 'root',
                               group = 'root',
-                              recursive=True,
+                              create_parents=True,
                               )
     self.assertResourceCalled('File', '/etc/security/limits.d/oozie.conf',
                               owner = 'root',
@@ -193,7 +193,7 @@ class TestOozieClient(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/security/limits.d',
                               owner = 'root',
                               group = 'root',
-                              recursive=True,
+                              create_parents=True,
                               )
     self.assertResourceCalled('File', '/etc/security/limits.d/oozie.conf',
                               owner = 'root',

http://git-wip-us.apache.org/repos/asf/ambari/blob/ce725290/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
index d167ef3..2183425 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
@@ -108,7 +108,7 @@ class TestOozieServer(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/security/limits.d',
                               owner = 'root',
                               group = 'root',
-                              recursive=True,
+                              create_parents=True,
                               )
     self.assertResourceCalled('File', '/etc/security/limits.d/oozie.conf',
                               owner = 'root',
@@ -227,9 +227,11 @@ class TestOozieServer(RMFTestCase):
         not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
         sudo = True,
     )
-    self.assertResourceCalled('Execute', ('chown', '-RL', 'oozie:hadoop', '/var/lib/oozie/oozie-server/conf'),
-        not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
-        sudo = True,
+    self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server/conf',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              recursion_follow_links = True,
+                              recursive_ownership = True,
     )
     self.assertResourceCalled('File', '/tmp/mysql-connector-java.jar',
         content = DownloadSource('http://c6401.ambari.apache.org:8080/resources//mysql-jdbc-driver.jar'),
@@ -263,8 +265,10 @@ class TestOozieServer(RMFTestCase):
         content = 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war ',
         mode = 0644,
     )
-    self.assertResourceCalled('Execute', ('chown', '-R', 'oozie:hadoop', '/var/lib/oozie/oozie-server'),
-        sudo = True,
+    self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              recursive_ownership = True,
     )
 
 
@@ -327,7 +331,7 @@ class TestOozieServer(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/security/limits.d',
                               owner = 'root',
                               group = 'root',
-                              recursive=True,
+                              create_parents=True,
                               )
     self.assertResourceCalled('File', '/etc/security/limits.d/oozie.conf',
                               owner = 'root',
@@ -443,9 +447,11 @@ class TestOozieServer(RMFTestCase):
                               not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
                               sudo = True,
                               )
-    self.assertResourceCalled('Execute', ('chown', '-RL', u'oozie:hadoop', '/var/lib/oozie/oozie-server/conf'),
-                              not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
-                              sudo = True,
+    self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server/conf',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              recursion_follow_links = True,
+                              recursive_ownership = True,
                               )
     self.assertResourceCalled('File', '/tmp/sqla-client-jdbc.tar.gz',
                               content = DownloadSource('http://c6401.ambari.apache.org:8080/resources//sqlanywhere-jdbc-driver.tar.gz'),
@@ -481,9 +487,11 @@ class TestOozieServer(RMFTestCase):
                               content = 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war ',
                               mode = 0644,
                               )
-    self.assertResourceCalled('Execute', ('chown', '-R', u'oozie:hadoop', '/var/lib/oozie/oozie-server'),
-                              sudo = True,
-                              )
+    self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              recursive_ownership = True,
+    )
     self.assertNoMoreResources()
 
 
@@ -698,7 +706,7 @@ class TestOozieServer(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/security/limits.d',
                               owner = 'root',
                               group = 'root',
-                              recursive=True,
+                              create_parents=True,
                               )
     self.assertResourceCalled('File', '/etc/security/limits.d/oozie.conf',
                               owner = 'root',
@@ -814,9 +822,11 @@ class TestOozieServer(RMFTestCase):
         not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
         sudo = True,
     )
-    self.assertResourceCalled('Execute', ('chown', '-RL', 'oozie:hadoop', '/var/lib/oozie/oozie-server/conf'),
-        not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
-        sudo = True,
+    self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server/conf',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              recursion_follow_links = True,
+                              recursive_ownership = True,
     )
     self.assertResourceCalled('Execute', 'ambari-sudo.sh cp /usr/lib/falcon/oozie/ext/falcon-oozie-el-extension-*.jar /usr/lib/oozie/libext',
         not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
@@ -836,8 +846,10 @@ class TestOozieServer(RMFTestCase):
         content = 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war ',
         mode = 0644,
     )
-    self.assertResourceCalled('Execute', ('chown', '-R', 'oozie:hadoop', '/var/lib/oozie/oozie-server'),
-        sudo = True,
+    self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              recursive_ownership = True,
     )
 
 
@@ -888,7 +900,7 @@ class TestOozieServer(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/security/limits.d',
                               owner = 'root',
                               group = 'root',
-                              recursive=True,
+                              create_parents=True,
                               )
     self.assertResourceCalled('File', '/etc/security/limits.d/oozie.conf',
                               owner = 'root',
@@ -1004,9 +1016,11 @@ class TestOozieServer(RMFTestCase):
         not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
         sudo = True,
     )
-    self.assertResourceCalled('Execute', ('chown', '-RL', 'oozie:hadoop', '/var/lib/oozie/oozie-server/conf'),
-        not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
-        sudo = True,
+    self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server/conf',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              recursion_follow_links = True,
+                              recursive_ownership = True,
     )
     self.assertResourceCalled('Execute', 'ambari-sudo.sh cp /usr/lib/falcon/oozie/ext/falcon-oozie-el-extension-*.jar /usr/lib/oozie/libext',
         not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
@@ -1026,8 +1040,10 @@ class TestOozieServer(RMFTestCase):
         content = 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war -secure',
         mode = 0644,
     )
-    self.assertResourceCalled('Execute', ('chown', '-R', 'oozie:hadoop', '/var/lib/oozie/oozie-server'),
-        sudo = True,
+    self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              recursive_ownership = True,
     )
 
     def test_configure_default_hdp22(self):
@@ -1221,15 +1237,19 @@ class TestOozieServer(RMFTestCase):
 
     self.assertResourceCalled('Execute',
       ('tar', '-zcvhf', '/tmp/oozie-upgrade-backup/oozie-conf-backup.tar', '/usr/hdp/current/oozie-server/conf/'),
-      sudo = True)
-
+      sudo = True,
+      tries = 3,
+      try_sleep = 1
+    )
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-server', u'2.2.1.0-2135'),
       sudo = True )
 
     self.assertResourceCalled('Execute',
       ('tar', '-xvf', '/tmp/oozie-upgrade-backup/oozie-conf-backup.tar', '-C', '/usr/hdp/current/oozie-server/conf//'),
-        sudo = True)
-
+      sudo = True,
+      tries = 3,
+      try_sleep = 1
+    )
     self.assertResourceCalled('Directory', '/tmp/oozie-upgrade-backup', action = ['delete'])
     self.assertResourceCalled('Directory', '/usr/hdp/current/oozie-server/libext', mode = 0777)
     self.assertResourceCalled('Execute', ('cp', '/usr/share/HDP-oozie/ext-2.2.zip', '/usr/hdp/current/oozie-server/libext'), sudo=True)
@@ -1289,13 +1309,21 @@ class TestOozieServer(RMFTestCase):
 
     self.assertResourceCalled('Execute',
       ('tar', '-zcvhf', '/tmp/oozie-upgrade-backup/oozie-conf-backup.tar', '/usr/hdp/current/oozie-server/conf/'),
-      sudo = True)
-
+      sudo = True,
+      tries = 3,
+      try_sleep = 1
+    )
+    self.assertResourceCalled('Link', '/etc/oozie/conf',
+                              to = '/usr/hdp/current/oozie-client/conf',
+    )
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-server', '2.3.0.0-1234'), sudo = True)
 
     self.assertResourceCalled('Execute',
       ('tar', '-xvf', '/tmp/oozie-upgrade-backup/oozie-conf-backup.tar', '-C', '/usr/hdp/current/oozie-server/conf//'),
-      sudo = True)
+      sudo = True,
+      tries = 3,
+      try_sleep = 1
+    )
 
     self.assertResourceCalled('Directory', '/tmp/oozie-upgrade-backup', action = ['delete'])
     self.assertResourceCalled('Directory', '/usr/hdp/current/oozie-server/libext', mode = 0777)
@@ -1347,13 +1375,18 @@ class TestOozieServer(RMFTestCase):
 
     self.assertResourceCalled('Execute',
       ('tar', '-zcvhf', '/tmp/oozie-upgrade-backup/oozie-conf-backup.tar', '/usr/hdp/current/oozie-server/conf/'),
-      sudo = True)
-
+      sudo = True,
+      tries = 3,
+      try_sleep = 1
+    )
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-server', u'2.2.0.0-0000'), sudo = True)
 
     self.assertResourceCalled('Execute',
       ('tar', '-xvf', '/tmp/oozie-upgrade-backup/oozie-conf-backup.tar', '-C', '/usr/hdp/current/oozie-server/conf//'),
-      sudo = True)
+      sudo = True,
+      tries = 3,
+      try_sleep = 1
+    )
 
     self.assertResourceCalled('Directory', '/tmp/oozie-upgrade-backup', action = ['delete'])
     self.assertResourceCalled('Directory', '/usr/hdp/current/oozie-server/libext',mode = 0777)
@@ -1547,13 +1580,21 @@ class TestOozieServer(RMFTestCase):
 
     self.assertResourceCalled('Execute',
       ('tar', '-zcvhf', '/tmp/oozie-upgrade-backup/oozie-conf-backup.tar', '/usr/hdp/current/oozie-server/conf/'),
-      sudo = True)
-
+      sudo = True,
+      tries = 3,
+      try_sleep = 1
+    )
+    self.assertResourceCalled('Link', '/etc/oozie/conf',
+                              to = '/usr/hdp/current/oozie-client/conf',
+    )
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-server', '2.3.0.0-1234'), sudo = True)
 
     self.assertResourceCalled('Execute',
       ('tar', '-xvf', '/tmp/oozie-upgrade-backup/oozie-conf-backup.tar', '-C', '/usr/hdp/current/oozie-server/conf//'),
-      sudo = True)
+      sudo = True,
+      tries = 3,
+      try_sleep = 1
+    )
 
     self.assertResourceCalled('Directory', '/tmp/oozie-upgrade-backup', action = ['delete'])
     self.assertResourceCalled('Directory', '/usr/hdp/current/oozie-server/libext', mode = 0777)

http://git-wip-us.apache.org/repos/asf/ambari/blob/ce725290/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
index 4033534..a6d0145 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
@@ -91,7 +91,6 @@ class TestServiceCheck(RMFTestCase):
         dfs_type = '',
         action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
-        dfs_type = '',
         type = 'directory',
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/examples',
@@ -106,7 +105,6 @@ class TestServiceCheck(RMFTestCase):
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         owner = 'ambari-qa',
-        dfs_type = '',
         group = 'hadoop'
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/input-data',
@@ -118,7 +116,6 @@ class TestServiceCheck(RMFTestCase):
         dfs_type = '',
         action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
-        dfs_type = '',
         type = 'directory',
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/input-data',
@@ -133,7 +130,6 @@ class TestServiceCheck(RMFTestCase):
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         owner = 'ambari-qa',
-        dfs_type = '',
         group = 'hadoop'
     )
     self.assertResourceCalled('HdfsResource', None,

http://git-wip-us.apache.org/repos/asf/ambari/blob/ce725290/ambari-server/src/test/python/unitTests.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/unitTests.py b/ambari-server/src/test/python/unitTests.py
index 7bc258d..6368ca1 100644
--- a/ambari-server/src/test/python/unitTests.py
+++ b/ambari-server/src/test/python/unitTests.py
@@ -20,6 +20,7 @@ import unittest
 import multiprocessing
 import os
 import sys
+import traceback
 from Queue import Empty
 from random import shuffle
 import fnmatch
@@ -114,8 +115,17 @@ def stack_test_executor(base_folder, service, stack, custom_tests, executor_resu
   #TODO Add an option to randomize the tests' execution
   #shuffle(tests)
   modules = [os.path.basename(s)[:-3] for s in tests]
-  suites = [unittest.defaultTestLoader.loadTestsFromName(name) for name in
-    modules]
+  try:
+    suites = [unittest.defaultTestLoader.loadTestsFromName(name) for name in
+      modules]
+  except:
+    executor_result.put({'exit_code': 1,
+                         'tests_run': 0,
+                         'errors': [("Failed to load test files {0}".format(str(modules)), traceback.format_exc(), "ERROR")],
+                         'failures': []})
+    executor_result.put(1)
+    return
+
   testSuite = unittest.TestSuite(suites)
   textRunner = unittest.TextTestRunner(verbosity=2).run(testSuite)
 


[30/39] ambari git commit: AMBARI-14835: Return 409 instead of 500 for duplicate setting creation (Ajit Kumar via smnaha)

Posted by nc...@apache.org.
AMBARI-14835: Return 409 instead of 500 for duplicate setting creation (Ajit Kumar via smnaha)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/528e8ea5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/528e8ea5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/528e8ea5

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 528e8ea5e05d98866e14dcfbe2d2becf6b7ceb7e
Parents: 1be4a2d
Author: Nahappan Somasundaram <ns...@hortonworks.com>
Authored: Fri Jan 29 15:50:00 2016 -0800
Committer: Nahappan Somasundaram <ns...@hortonworks.com>
Committed: Fri Jan 29 17:16:42 2016 -0800

----------------------------------------------------------------------
 .../internal/AdminSettingResourceProvider.java       |  5 +++++
 .../internal/AdminSettingResourceProviderTest.java   | 15 +++++++++++++--
 .../ambari/server/orm/dao/AdminSettingDAOTest.java   |  4 ++++
 3 files changed, 22 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/528e8ea5/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AdminSettingResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AdminSettingResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AdminSettingResourceProvider.java
index f62958f..e8d5b5e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AdminSettingResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AdminSettingResourceProvider.java
@@ -19,6 +19,7 @@ package org.apache.ambari.server.controller.internal;
 
 import com.google.inject.Inject;
 import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.DuplicateResourceException;
 import org.apache.ambari.server.StaticallyInject;
 import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
 import org.apache.ambari.server.controller.spi.NoSuchResourceException;
@@ -171,6 +172,10 @@ public class AdminSettingResourceProvider extends AbstractAuthorizedResourceProv
       @Override
       public AdminSettingEntity invoke() throws AmbariException, AuthorizationException {
         AdminSettingEntity entity = toEntity(properties);
+        if (dao.findByName(entity.getName()) != null) {
+          throw new DuplicateResourceException(
+                  String.format("Setting already exists. setting name :%s ", entity.getName()));
+        }
         dao.create(entity);
         notifyCreate(Resource.Type.AdminSetting, request);
         return entity;

http://git-wip-us.apache.org/repos/asf/ambari/blob/528e8ea5/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AdminSettingResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AdminSettingResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AdminSettingResourceProviderTest.java
index 68eaf28..e95449c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AdminSettingResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AdminSettingResourceProviderTest.java
@@ -18,10 +18,12 @@
 package org.apache.ambari.server.controller.internal;
 
 import com.google.common.collect.Lists;
+import org.apache.ambari.server.DuplicateResourceException;
 import org.apache.ambari.server.controller.spi.Predicate;
 import org.apache.ambari.server.controller.spi.Request;
 import org.apache.ambari.server.controller.spi.RequestStatus;
 import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
 import org.apache.ambari.server.controller.utilities.PredicateBuilder;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.orm.dao.AdminSettingDAO;
@@ -190,6 +192,7 @@ public class AdminSettingResourceProviderTest {
     Capture<AdminSettingEntity> entityCapture = Capture.newInstance();
     Request request = createRequest(entity);
 
+    expect(dao.findByName(entity.getName())).andReturn(null);
     dao.create(capture(entityCapture));
     mockControl.replay();
 
@@ -204,6 +207,16 @@ public class AdminSettingResourceProviderTest {
     assertEquals(AuthorizationHelper.getAuthenticatedName(), capturedEntity.getUpdatedBy());
   }
 
+  @Test(expected = ResourceAlreadyExistsException.class)
+  public void testCreateDuplicateResource() throws Exception {
+    setupAuthenticationForAdmin();
+    AdminSettingEntity entity = newEntity("motd");
+    Request request = createRequest(entity);
+
+    expect(dao.findByName(entity.getName())).andReturn(entity);
+    mockControl.replay();
+    resourceProvider.createResources(request);
+  }
 
   @Test(expected = AuthorizationException.class)
   public void testUpdateResources_noAuth() throws Exception {
@@ -211,7 +224,6 @@ public class AdminSettingResourceProviderTest {
     resourceProvider.updateResources(updateRequest(newEntity("motd")), null);
   }
 
-
   @Test(expected = AuthorizationException.class)
   public void testUpdateResources_clusterUser() throws Exception {
     setupAuthenticationForClusterUser();
@@ -219,7 +231,6 @@ public class AdminSettingResourceProviderTest {
     resourceProvider.updateResources(updateRequest(newEntity("motd")), null);
   }
 
-
   @Test
   public void testUpdateResources_admin() throws Exception {
     setupAuthenticationForAdmin();

http://git-wip-us.apache.org/repos/asf/ambari/blob/528e8ea5/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AdminSettingDAOTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AdminSettingDAOTest.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AdminSettingDAOTest.java
index eabbb77..8258974 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AdminSettingDAOTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AdminSettingDAOTest.java
@@ -31,6 +31,7 @@ import java.util.Map;
 import java.util.Objects;
 
 import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertNull;
 
 public class AdminSettingDAOTest {
   private  Injector injector;
@@ -63,6 +64,9 @@ public class AdminSettingDAOTest {
     retrieveAndValidateSame(entities);
     assertEquals(entities.size(), dao.findAll().size());
 
+    //Should return null if doesn't exist.
+    assertNull(dao.findByName("does-not-exist"));
+
 
     //Update
     for(Map.Entry<String, AdminSettingEntity> entry : entities.entrySet()) {


[39/39] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/73aee31e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/73aee31e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/73aee31e

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 73aee31ef67348bbfc942bdf719c6952964a82ab
Parents: e7bdb7b 3ab6a3a
Author: Nate Cole <nc...@hortonworks.com>
Authored: Mon Feb 1 10:41:26 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Mon Feb 1 10:41:26 2016 -0500

----------------------------------------------------------------------
 .../main/resources/ui/admin-web/app/index.html  |   1 +
 .../controllers/clusters/UserAccessListCtrl.js  |   2 +-
 .../loginActivities/LoginMessageMainCtrl.js     |  67 ++
 .../ui/admin-web/app/scripts/i18n.config.js     |   6 +-
 .../resources/ui/admin-web/app/styles/main.css  |   6 +
 .../app/views/clusters/userAccessList.html      |   4 +-
 .../app/views/loginActivities/loginMessage.html |  50 +-
 ambari-agent/conf/unix/ambari-agent             |  13 +
 ambari-agent/etc/init/ambari-agent.conf         |  34 +
 ambari-agent/pom.xml                            | 479 +---------
 ambari-agent/src/packages/tarball/all.xml       | 168 +++-
 .../resource_management/core/global_lock.py     |  46 +
 .../libraries/functions/curl_krb_request.py     |  47 +-
 .../libraries/functions/hive_check.py           |  23 +-
 .../sink/timeline/MetadataException.java        |  28 +
 .../sink/timeline/TimelineMetricMetadata.java   | 163 ++++
 .../timeline/HBaseTimelineMetricStore.java      |  73 +-
 .../metrics/timeline/PhoenixHBaseAccessor.java  | 253 +++++-
 .../timeline/TimelineMetricConfiguration.java   |   9 +
 .../metrics/timeline/TimelineMetricStore.java   |  22 +-
 .../TimelineMetricAggregatorFactory.java        |   5 +-
 .../TimelineMetricAppAggregator.java            |  28 +-
 .../TimelineMetricClusterAggregatorSecond.java  |   5 +-
 .../discovery/TimelineMetricMetadataKey.java    |  56 ++
 .../TimelineMetricMetadataManager.java          | 187 ++++
 .../discovery/TimelineMetricMetadataSync.java   | 105 +++
 .../timeline/query/PhoenixTransactSQL.java      |  37 +-
 .../webapp/TimelineWebServices.java             |  59 +-
 .../TestApplicationHistoryServer.java           |  11 +-
 .../timeline/AbstractMiniHBaseClusterTest.java  |   8 +-
 .../timeline/ITPhoenixHBaseAccessor.java        |   9 +-
 .../timeline/TestTimelineMetricStore.java       |  14 +
 .../aggregators/ITClusterAggregator.java        |  16 +-
 .../timeline/discovery/TestMetadataManager.java | 112 +++
 ambari-server/pom.xml                           | 883 +------------------
 ambari-server/src/main/assemblies/server.xml    | 348 ++++++--
 .../internal/AdminSettingResourceProvider.java  |   5 +
 .../AlertDefinitionResourceProvider.java        |   4 +-
 .../internal/AlertResourceProvider.java         |   6 +-
 .../AmbariAuthorizationFilter.java              |   3 +-
 .../server/upgrade/UpgradeCatalog210.java       |   4 +-
 .../server/upgrade/UpgradeCatalog221.java       |   8 +
 .../server/upgrade/UpgradeCatalog222.java       |  15 +
 .../HAWQ/2.0.0/configuration/hawq-site.xml      |  14 +
 .../package/alerts/alert_hive_metastore.py      |  15 +-
 .../package/alerts/alert_webhcat_server.py      |   6 -
 .../package/alerts/alert_check_oozie_server.py  |  15 +-
 .../OOZIE/4.0.0.2.0/package/scripts/oozie.py    |   2 +-
 .../SPARK/1.2.0.2.2/package/scripts/params.py   |   2 +-
 .../YARN/2.1.0.2.0/kerberos.json                |   4 +-
 .../services/HIVE/configuration/hive-site.xml   |   2 -
 .../stacks/HDP/2.2/services/YARN/kerberos.json  |   4 +-
 .../HDP/2.3/services/ACCUMULO/widgets.json      |   4 +-
 .../stacks/HDP/2.3/services/YARN/kerberos.json  |   4 +-
 .../stacks/HDP/2.3/services/stack_advisor.py    |   9 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml |   5 +
 .../stacks/HDP/2.3/upgrades/upgrade-2.4.xml     |  80 +-
 .../SPARK/configuration/spark-defaults.xml      |   4 +-
 .../configuration/spark-thrift-sparkconf.xml    |  31 +-
 .../stacks/HDP/2.4/upgrades/config-upgrade.xml  |  26 +-
 .../AdminSettingResourceProviderTest.java       |  15 +-
 .../server/orm/dao/AdminSettingDAOTest.java     |   4 +
 .../ambari/server/orm/dao/AlertsDAOTest.java    |  38 +
 .../server/upgrade/UpgradeCatalog221Test.java   |  47 +
 .../server/upgrade/UpgradeCatalog222Test.java   |   6 +-
 ambari-server/src/test/python/TestGlobalLock.py |  55 ++
 .../stacks/2.0.6/OOZIE/test_oozie_client.py     |   6 +-
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     | 117 ++-
 .../stacks/2.0.6/OOZIE/test_service_check.py    |   4 -
 .../common/services-normal-hawq-3-hosts.json    |  11 +
 .../stacks/2.3/common/test_stack_advisor.py     |  34 +
 ambari-server/src/test/python/unitTests.py      |  14 +-
 .../main/admin/stack_and_upgrade_controller.js  |  22 +-
 .../main/alert_definitions_controller.js        |  41 +
 .../controllers/main/host/combo_search_box.js   |  30 +-
 ambari-web/app/controllers/main/service/item.js |   2 +-
 .../app/controllers/wizard/step0_controller.js  |   2 +-
 .../app/controllers/wizard/step3_controller.js  |   3 +-
 .../app/controllers/wizard/step7_controller.js  |   2 +-
 .../mappers/alert_definition_summary_mapper.js  |   8 +-
 ambari-web/app/messages.js                      |   2 +
 .../main/service/configs/config_overridable.js  |   3 +-
 .../app/models/alerts/alert_definition.js       |   1 +
 ambari-web/app/models/host_component.js         |   3 +-
 ambari-web/app/models/service.js                |  43 +
 .../stack_upgrade/stack_upgrade_wizard.hbs      |   8 +-
 ambari-web/app/templates/main/alerts.hbs        |  32 +-
 .../alert_definition/alert_definition_state.hbs |  31 +
 .../alert_definition_summary.hbs                |  28 +
 ambari-web/app/utils/ajax/ajax.js               |  19 +
 ambari-web/app/utils/ember_reopen.js            |  14 +
 ambari-web/app/utils/validator.js               |   2 +-
 ambari-web/app/views.js                         |   2 +
 .../app/views/common/quick_view_link_view.js    |   3 +
 ambari-web/app/views/common/sort_view.js        |  19 +-
 ambari-web/app/views/common/table_view.js       |  24 +-
 .../app/views/main/alert_definitions_view.js    |  23 +-
 .../alert_definition/alert_definition_state.js  |  34 +
 .../alert_definition_summary.js                 |  65 ++
 .../views/main/dashboard/config_history_view.js |  12 +
 .../app/views/main/host/combo_search_box.js     |   1 +
 .../test/controllers/main/service/item_test.js  |  13 +-
 ambari-web/test/utils/validator_test.js         |   1 +
 .../test/views/common/quick_link_view_test.js   |   5 +
 ambari-web/vendor/scripts/visualsearch.js       |   2 +-
 105 files changed, 2726 insertions(+), 1774 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/73aee31e/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
----------------------------------------------------------------------


[08/39] ambari git commit: AMBARI-14817. ASW with Oozie can't proceed step7 (alexantonenko)

Posted by nc...@apache.org.
AMBARI-14817. ASW with Oozie can't proceed step7 (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d38a9531
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d38a9531
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d38a9531

Branch: refs/heads/branch-dev-patch-upgrade
Commit: d38a9531efa667e644ce7b67e041f8f6ad2d06a9
Parents: 75be3b9
Author: Alex Antonenko <hi...@gmail.com>
Authored: Thu Jan 28 04:56:59 2016 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Thu Jan 28 04:56:59 2016 +0200

----------------------------------------------------------------------
 ambari-web/app/controllers/wizard/step0_controller.js | 2 +-
 ambari-web/app/controllers/wizard/step3_controller.js | 3 ++-
 ambari-web/app/controllers/wizard/step7_controller.js | 2 +-
 3 files changed, 4 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d38a9531/ambari-web/app/controllers/wizard/step0_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step0_controller.js b/ambari-web/app/controllers/wizard/step0_controller.js
index 4e60d1b..aa3541a 100644
--- a/ambari-web/app/controllers/wizard/step0_controller.js
+++ b/ambari-web/app/controllers/wizard/step0_controller.js
@@ -73,12 +73,12 @@ App.WizardStep0Controller = Em.Controller.extend({
     if(App.router.nextBtnClickInProgress){
       return;
     }
-    App.router.nextBtnClickInProgress = true;
     this.set('hasSubmitted', true);
     if (!this.get('invalidClusterName')) {
       App.clusterStatus.set('clusterName', this.get('content.cluster.name'));
       this.set('content.cluster.status', 'PENDING');
       this.set('content.cluster.isCompleted', false);
+      App.router.nextBtnClickInProgress = true;
       App.router.send('next');
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d38a9531/ambari-web/app/controllers/wizard/step3_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step3_controller.js b/ambari-web/app/controllers/wizard/step3_controller.js
index 6fd4563..8ae9068 100644
--- a/ambari-web/app/controllers/wizard/step3_controller.js
+++ b/ambari-web/app/controllers/wizard/step3_controller.js
@@ -1657,17 +1657,18 @@ App.WizardStep3Controller = Em.Controller.extend(App.ReloadPopupMixin, {
     if(App.router.nextBtnClickInProgress){
       return;
     }
-    App.router.nextBtnClickInProgress = true;
     if (this.get('isHostHaveWarnings')) {
       return App.showConfirmationPopup(
         function () {
           self.set('confirmedHosts', self.get('bootHosts'));
+          App.router.nextBtnClickInProgress = true;
           App.router.send('next');
         },
         Em.I18n.t('installer.step3.hostWarningsPopup.hostHasWarnings'));
     }
     else {
       this.set('confirmedHosts', this.get('bootHosts'));
+      App.router.nextBtnClickInProgress = true;
       App.router.send('next');
     }
     return null;

http://git-wip-us.apache.org/repos/asf/ambari/blob/d38a9531/ambari-web/app/controllers/wizard/step7_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step7_controller.js b/ambari-web/app/controllers/wizard/step7_controller.js
index 81e69d6..f6b6cb3 100644
--- a/ambari-web/app/controllers/wizard/step7_controller.js
+++ b/ambari-web/app/controllers/wizard/step7_controller.js
@@ -1638,6 +1638,7 @@ App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, App.E
    * Proceed to the next step
    **/
   moveNext: function () {
+    App.router.nextBtnClickInProgress = true;
     App.router.send('next');
     this.set('submitButtonClicked', false);
   },
@@ -1651,7 +1652,6 @@ App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, App.E
     if (this.get('isSubmitDisabled') || App.router.nextBtnClickInProgress) {
       return false;
     }
-    App.router.nextBtnClickInProgress = true;
     var preInstallChecksController = App.router.get('preInstallChecksController');
     if (this.get('supportsPreInstallChecks')) {
       if (preInstallChecksController.get('preInstallChecksWhereRun')) {
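
The common thread in the three wizard controllers is a re-entrancy guard around the Next button: nextBtnClickInProgress may only be raised at the moment the transition is actually dispatched, otherwise an early return (failed validation, a confirmation popup) leaves the flag stuck and the button dead. A condensed sketch of the corrected pattern (the validate() hook is illustrative):

    submit: function () {
      if (App.router.nextBtnClickInProgress) {
        return;                                  // a transition is already in flight
      }
      if (!this.validate()) {                    // illustrative validation step
        return;                                  // flag untouched, Next stays clickable
      }
      App.router.nextBtnClickInProgress = true;  // raise the guard only when moving on
      App.router.send('next');
    }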


[33/39] ambari git commit: AMBARI-14858 Can't delete service if install failed. (ababiichuk)

Posted by nc...@apache.org.
AMBARI-14858 Can't delete service if install failed. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/40a0ef62
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/40a0ef62
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/40a0ef62

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 40a0ef623d3b213213ac5a8340ed825b535b0234
Parents: e15ed44
Author: ababiichuk <ab...@hortonworks.com>
Authored: Mon Feb 1 11:12:58 2016 +0200
Committer: ababiichuk <ab...@hortonworks.com>
Committed: Mon Feb 1 11:12:58 2016 +0200

----------------------------------------------------------------------
 ambari-web/app/controllers/main/service/item.js |  2 +-
 ambari-web/app/models/service.js                | 43 ++++++++++++++++++++
 .../test/controllers/main/service/item_test.js  | 13 ++++--
 3 files changed, 54 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/40a0ef62/ambari-web/app/controllers/main/service/item.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/item.js b/ambari-web/app/controllers/main/service/item.js
index b8ad011..4678c64 100644
--- a/ambari-web/app/controllers/main/service/item.js
+++ b/ambari-web/app/controllers/main/service/item.js
@@ -997,7 +997,7 @@ App.MainServiceItemController = Em.Controller.extend(App.SupportClientConfigsDow
       });
     } else if (dependentServices.length > 0) {
       this.dependentServicesWarning(serviceName, dependentServices);
-    } else if (App.Service.find(serviceName).get('workStatus') === 'INSTALLED') {
+    } else if (App.Service.allowUninstallStates.contains(App.Service.find(serviceName).get('workStatus'))) {
       App.showConfirmationPopup(
         function() {self.confirmDeleteService(serviceName)},
         Em.I18n.t('services.service.delete.popup.warning').format(displayName),

http://git-wip-us.apache.org/repos/asf/ambari/blob/40a0ef62/ambari-web/app/models/service.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/service.js b/ambari-web/app/models/service.js
index eba0fe9..39a6b1c 100644
--- a/ambari-web/app/models/service.js
+++ b/ambari-web/app/models/service.js
@@ -158,6 +158,49 @@ App.Service = DS.Model.extend({
 
 });
 
+/**
+ * Map of all service states
+ *
+ * @type {Object}
+ */
+App.Service.statesMap = {
+  init: 'INIT',
+  installing: 'INSTALLING',
+  install_failed: 'INSTALL_FAILED',
+  stopped: 'INSTALLED',
+  starting: 'STARTING',
+  started: 'STARTED',
+  stopping: 'STOPPING',
+  uninstalling: 'UNINSTALLING',
+  uninstalled: 'UNINSTALLED',
+  wiping_out: 'WIPING_OUT',
+  upgrading: 'UPGRADING',
+  maintenance: 'MAINTENANCE',
+  unknown: 'UNKNOWN'
+};
+
+/**
+ * @type {String[]}
+ */
+App.Service.inProgressStates = [
+  App.Service.statesMap.installing,
+  App.Service.statesMap.starting,
+  App.Service.statesMap.stopping,
+  App.Service.statesMap.uninstalling,
+  App.Service.statesMap.upgrading,
+  App.Service.statesMap.wiping_out
+];
+
+/**
+ * @type {String[]}
+ */
+App.Service.allowUninstallStates = [
+  App.Service.statesMap.init,
+  App.Service.statesMap.install_failed,
+  App.Service.statesMap.stopped,
+  App.Service.statesMap.unknown
+];
+
 App.Service.Health = {
   live: "LIVE",
   dead: "DEAD-RED",

http://git-wip-us.apache.org/repos/asf/ambari/blob/40a0ef62/ambari-web/test/controllers/main/service/item_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/service/item_test.js b/ambari-web/test/controllers/main/service/item_test.js
index de09e1a..90a1d90 100644
--- a/ambari-web/test/controllers/main/service/item_test.js
+++ b/ambari-web/test/controllers/main/service/item_test.js
@@ -1250,21 +1250,28 @@ describe('App.MainServiceItemController', function () {
 
     it("service has installed dependent services", function() {
       this.mockDependentServices.returns(['S2']);
-      this.mockService.returns(Em.Object.create({workStatus: 'INSTALLED'}));
+      this.mockService.returns(Em.Object.create({workStatus: App.Service.statesMap.stopped}));
       mainServiceItemController.deleteService('S1');
       expect(mainServiceItemController.dependentServicesWarning.calledWith('S1', ['S2'])).to.be.true;
     });
 
     it("service has not dependent services, and stopped", function() {
       this.mockDependentServices.returns([]);
-      this.mockService.returns(Em.Object.create({workStatus: 'INSTALLED'}));
+      this.mockService.returns(Em.Object.create({workStatus: App.Service.statesMap.stopped}));
+      mainServiceItemController.deleteService('S1');
+      expect(App.showConfirmationPopup.calledOnce).to.be.true;
+    });
+
+    it("service has not dependent services, and install failed", function() {
+      this.mockDependentServices.returns([]);
+      this.mockService.returns(Em.Object.create({workStatus: App.Service.statesMap.install_failed}));
       mainServiceItemController.deleteService('S1');
       expect(App.showConfirmationPopup.calledOnce).to.be.true;
     });
 
     it("service has not dependent services, and not stopped", function() {
       this.mockDependentServices.returns([]);
-      this.mockService.returns(Em.Object.create({workStatus: 'STARTED'}));
+      this.mockService.returns(Em.Object.create({workStatus: App.Service.statesMap.started}));
       mainServiceItemController.deleteService('S1');
       expect(App.ModalPopup.show.calledWith({
         secondary: null,


[13/39] ambari git commit: AMBARI-14825 Metrics collector fails to start after upgrade to 2.2.1 on a secure cluster (dsen)

Posted by nc...@apache.org.
AMBARI-14825 Metrics collector fails to start after upgrade to 2.2.1 on a secure cluster (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e72a6b46
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e72a6b46
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e72a6b46

Branch: refs/heads/branch-dev-patch-upgrade
Commit: e72a6b4665bceb472a1cc1d9a2080b741d1cb560
Parents: f10d41c
Author: Dmytro Sen <ds...@apache.org>
Authored: Thu Jan 28 20:04:06 2016 +0200
Committer: Dmytro Sen <ds...@apache.org>
Committed: Thu Jan 28 20:04:06 2016 +0200

----------------------------------------------------------------------
 .../java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java | 1 +
 .../org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java  | 4 ++++
 2 files changed, 5 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e72a6b46/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
index 4eebb66..13ec345 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
@@ -163,6 +163,7 @@ public class UpgradeCatalog221 extends AbstractUpgradeCatalog {
     updateOozieConfigs();
     updateTezConfigs();
     updateRangerKmsDbksConfigs();
+    updateAMSConfigs();
   }
 
   protected void updateAlerts() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/e72a6b46/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
index 5a14bc9..c15fbd8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
@@ -144,6 +144,7 @@ public class UpgradeCatalog221Test {
     Method updateOozieConfigs = UpgradeCatalog221.class.getDeclaredMethod("updateOozieConfigs");
     Method updateTezConfigs = UpgradeCatalog221.class.getDeclaredMethod("updateTezConfigs");
     Method updateRangerKmsDbksConfigs = UpgradeCatalog221.class.getDeclaredMethod("updateRangerKmsDbksConfigs");
+    Method updateAMSConfigs = UpgradeCatalog221.class.getDeclaredMethod("updateAMSConfigs");
 
     UpgradeCatalog221 upgradeCatalog221 = createMockBuilder(UpgradeCatalog221.class)
       .addMockedMethod(addNewConfigurationsFromXml)
@@ -151,6 +152,7 @@ public class UpgradeCatalog221Test {
       .addMockedMethod(updateOozieConfigs)
       .addMockedMethod(updateTezConfigs)
       .addMockedMethod(updateRangerKmsDbksConfigs)
+      .addMockedMethod(updateAMSConfigs)
       .createMock();
 
     upgradeCatalog221.addNewConfigurationsFromXml();
@@ -163,6 +165,8 @@ public class UpgradeCatalog221Test {
     expectLastCall().once();
     upgradeCatalog221.updateRangerKmsDbksConfigs();
     expectLastCall().once();
+    upgradeCatalog221.updateAMSConfigs();
+    expectLastCall().once();
 
 
     replay(upgradeCatalog221);